From 68878f855eb47ba00e9deeeaee10de383a0cecef Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 12 Jul 2022 06:19:58 +0800 Subject: [PATCH 001/151] Fixing variable declaration in Deploy.ps1 --- solution/DeploymentV2/Deploy.ps1 | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index f3bdf7cb..a22731ac 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -27,6 +27,8 @@ # Preparation #Mandatory #------------------------------------------------------------------------------------------------------------ $deploymentFolderPath = (Get-Location).Path +$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') Invoke-Expression ./Deploy_0_Prep.ps1 @@ -85,6 +87,10 @@ Invoke-Expression ./Deploy_1_Infra0.ps1 $AddCurrentUserAsWebAppAdmin = if($tout.publish_web_app_addcurrentuserasadmin) {$true} else {$false} Set-Location $deploymentFolderPath + +#------------------------------------------------------------------------------------------------------------ +# Run Each SubModule +#------------------------------------------------------------------------------------------------------------ Invoke-Expression ./Deploy_3_Infra1.ps1 Invoke-Expression ./Deploy_4_PrivateLinks.ps1 Invoke-Expression ./Deploy_5_WebApp.ps1 From 0759cc09b3e535dd1db174ac7bbb01c40aa17114 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 19 Jul 2022 19:52:22 +0800 Subject: [PATCH 002/151] Adding new SIF Notebooks --- .../sif/SifOpenApi/GenerateScripts.ps1 | 77 +++ .../sif/SifOpenApi/LakeDbTableTemplate.json | 78 +++ solution/SampleFiles/sif/SifOpenApi/test.ps1 | 20 - .../table/ref_abstractcontentelement.json | 78 +++ ...ref_aucodesets0211programavailability.json | 78 +++ ...aucodesets0792identificationprocedure.json | 78 +++ 
.../table/ref_aucodesetsaccompaniment.json | 78 +++ .../siflake/table/ref_aucodesetsacstrand.json | 78 +++ ...ref_aucodesetsactivityinvolvementcode.json | 78 +++ .../table/ref_aucodesetsactivitytype.json | 78 +++ .../table/ref_aucodesetsaddressrole.json | 78 +++ .../table/ref_aucodesetsaddresstype.json | 78 +++ .../table/ref_aucodesetsagcollection.json | 78 +++ .../ref_aucodesetsagcontextquestion.json | 78 +++ .../ref_aucodesetsagsubmissionstatus.json | 78 +++ ...f_aucodesetsassessmentreportingmethod.json | 78 +++ .../table/ref_aucodesetsassessmenttype.json | 78 +++ .../table/ref_aucodesetsattendancecode.json | 78 +++ .../table/ref_aucodesetsattendancestatus.json | 78 +++ ...aucodesetsaustraliancitizenshipstatus.json | 78 +++ ...cationofculturalandethnicgroupsascceg.json | 78 +++ ...standardclassificationoflanguagesascl.json | 78 +++ ...dclassificationofreligiousgroupsascrg.json | 78 +++ ...tandardgeographicalclassificationasgc.json | 78 +++ .../ref_aucodesetsaustraliantimezone.json | 78 +++ .../ref_aucodesetsbirthdateverification.json | 78 +++ .../siflake/table/ref_aucodesetsboarding.json | 78 +++ .../table/ref_aucodesetscalendarevent.json | 78 +++ .../table/ref_aucodesetscontactmethod.json | 78 +++ .../table/ref_aucodesetsdayvaluecode.json | 78 +++ .../ref_aucodesetsdetentioncategory.json | 78 +++ .../ref_aucodesetsdwellingarrangement.json | 78 +++ .../ref_aucodesetseducationagencytype.json | 78 +++ .../table/ref_aucodesetseducationlevel.json | 78 +++ .../table/ref_aucodesetselectronicidtype.json | 78 +++ .../table/ref_aucodesetsemailtype.json | 78 +++ .../table/ref_aucodesetsemploymenttype.json | 78 +++ .../ref_aucodesetsenglishproficiency.json | 78 +++ .../ref_aucodesetsenrollmenttimeframe.json | 78 +++ .../table/ref_aucodesetsentrytype.json | 78 +++ .../table/ref_aucodesetsequipmenttype.json | 78 +++ .../table/ref_aucodesetseventcategory.json | 78 +++ .../table/ref_aucodesetseventsubcategory.json | 78 +++ .../ref_aucodesetsexitwithdrawalstatus.json | 78 +++ 
.../ref_aucodesetsexitwithdrawaltype.json | 78 +++ .../ref_aucodesetsfederalelectorate.json | 78 +++ .../table/ref_aucodesetsffposstatuscode.json | 78 +++ .../table/ref_aucodesetsftptstatuscode.json | 78 +++ .../ref_aucodesetsgroupcategorycode.json | 78 +++ ...codesetsimmunisationcertificatestatus.json | 78 +++ .../table/ref_aucodesetsindigenousstatus.json | 78 +++ .../table/ref_aucodesetslanguagetype.json | 78 +++ ...learningstandarditemrelationshiptypes.json | 78 +++ .../ref_aucodesetsmaritalstatusaihw.json | 78 +++ .../ref_aucodesetsmediumofinstruction.json | 78 +++ .../table/ref_aucodesetsnameusagetype.json | 78 +++ .../table/ref_aucodesetsnapjurisdiction.json | 78 +++ .../ref_aucodesetsnapparticipationcode.json | 78 +++ .../ref_aucodesetsnapresponsecorrectness.json | 78 +++ .../table/ref_aucodesetsnaptestdomain.json | 78 +++ .../ref_aucodesetsnaptestitemmarkingtype.json | 78 +++ .../table/ref_aucodesetsnaptestitemtype.json | 78 +++ .../table/ref_aucodesetsnaptesttype.json | 78 +++ .../table/ref_aucodesetsnapwritinggenre.json | 78 +++ .../ref_aucodesetsnonschooleducation.json | 78 +++ .../ref_aucodesetsoperationalstatus.json | 78 +++ ...ref_aucodesetspermanentresidentstatus.json | 78 +++ .../ref_aucodesetspermissioncategorycode.json | 78 +++ .../table/ref_aucodesetspersonalisedplan.json | 78 +++ .../table/ref_aucodesetspicturesource.json | 78 +++ .../siflake/table/ref_aucodesetspnpcode.json | 78 +++ .../table/ref_aucodesetspreprimaryhours.json | 78 +++ ...ef_aucodesetsprogramfundingsourcecode.json | 78 +++ .../table/ref_aucodesetsprogresslevel.json | 78 +++ ...aucodesetspublicschoolcatchmentstatus.json | 78 +++ ...odesetsreceivinglocationofinstruction.json | 78 +++ .../ref_aucodesetsrelationshiptostudent.json | 78 +++ ...ef_aucodesetsresourceusagecontenttype.json | 78 +++ .../ref_aucodesetsscheduledactivitytype.json | 78 +++ .../table/ref_aucodesetsschoolcoedstatus.json | 78 +++ ...ef_aucodesetsschooleducationleveltype.json | 78 +++ 
.../ref_aucodesetsschoolenrollmenttype.json | 78 +++ .../table/ref_aucodesetsschoolfocuscode.json | 78 +++ .../table/ref_aucodesetsschoollevel.json | 78 +++ .../table/ref_aucodesetsschoollocation.json | 78 +++ .../table/ref_aucodesetsschoolsectorcode.json | 78 +++ .../table/ref_aucodesetsschoolsystem.json | 78 +++ .../table/ref_aucodesetssessiontype.json | 78 +++ .../siflake/table/ref_aucodesetssexcode.json | 78 +++ .../table/ref_aucodesetssourcecodetype.json | 78 +++ .../table/ref_aucodesetsstaffactivity.json | 78 +++ .../table/ref_aucodesetsstaffstatus.json | 78 +++ ...stralianclassificationofcountriessacc.json | 78 +++ .../ref_aucodesetsstateterritorycode.json | 78 +++ ...ef_aucodesetsstudentfamilyprogramtype.json | 78 +++ .../ref_aucodesetssuspensioncategory.json | 78 +++ .../table/ref_aucodesetssystemicstatus.json | 78 +++ .../ref_aucodesetsteachercovercredit.json | 78 +++ ...ref_aucodesetsteachercoversupervision.json | 78 +++ .../ref_aucodesetstelephonenumbertype.json | 78 +++ .../table/ref_aucodesetstravelmode.json | 78 +++ .../ref_aucodesetsvisastudyentitlement.json | 78 +++ .../table/ref_aucodesetsvisasubclass.json | 78 +++ .../ref_aucodesetswellbeingalertcategory.json | 78 +++ .../ref_aucodesetswellbeingappealstatus.json | 78 +++ ...desetswellbeingcharacteristiccategory.json | 78 +++ ...wellbeingcharacteristicclassification.json | 78 +++ ...etswellbeingcharacteristicsubcategory.json | 78 +++ ...aucodesetswellbeingeventcategoryclass.json | 78 +++ .../ref_aucodesetswellbeingeventlocation.json | 78 +++ ...ef_aucodesetswellbeingeventtimeperiod.json | 78 +++ ...f_aucodesetswellbeingresponsecategory.json | 78 +++ .../table/ref_aucodesetswellbeingstatus.json | 78 +++ .../table/ref_aucodesetsyearlevelcode.json | 78 +++ .../table/ref_aucodesetsyesornocategory.json | 78 +++ .../siflake/table/ref_definedprotocols.json | 78 +++ .../siflake/table/ref_genericyesno.json | 78 +++ ...f_iso4217currencynamesandcodeelements.json | 78 +++ 
.../notebook/sif/SIFDimCalendarDate.ipynb | 213 +++++++ .../sif/SIFDimLoadTeachingGroup.ipynb | 518 ++++++++++++++++++ .../sif/SIFLoadDimStaffPersonal.ipynb | 120 ++-- .../sif/SIFLoadDimStudentPersonal.ipynb | 173 ++---- 122 files changed, 9933 insertions(+), 236 deletions(-) create mode 100644 solution/SampleFiles/sif/SifOpenApi/GenerateScripts.ps1 create mode 100644 solution/SampleFiles/sif/SifOpenApi/LakeDbTableTemplate.json delete mode 100644 solution/SampleFiles/sif/SifOpenApi/test.ps1 create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_abstractcontentelement.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesets0211programavailability.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesets0792identificationprocedure.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaccompaniment.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsacstrand.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsactivityinvolvementcode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsactivitytype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaddressrole.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaddresstype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagcollection.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagcontextquestion.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagsubmissionstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsassessmentreportingmethod.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsassessmenttype.json create mode 100644 
solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsattendancecode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsattendancestatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustraliancitizenshipstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationofculturalandethnicgroupsascceg.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationoflanguagesascl.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationofreligiousgroupsascrg.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardgeographicalclassificationasgc.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustraliantimezone.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsbirthdateverification.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsboarding.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetscalendarevent.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetscontactmethod.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdayvaluecode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdetentioncategory.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdwellingarrangement.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseducationagencytype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseducationlevel.json create mode 100644 
solution/Synapse/Patterns/database/siflake/table/ref_aucodesetselectronicidtype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsemailtype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsemploymenttype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsenglishproficiency.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsenrollmenttimeframe.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsentrytype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsequipmenttype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseventcategory.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseventsubcategory.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsexitwithdrawalstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsexitwithdrawaltype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsfederalelectorate.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsffposstatuscode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsftptstatuscode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsgroupcategorycode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsimmunisationcertificatestatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsindigenousstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetslanguagetype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetslearningstandarditemrelationshiptypes.json create mode 100644 
solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsmaritalstatusaihw.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsmediumofinstruction.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnameusagetype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapjurisdiction.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapparticipationcode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapresponsecorrectness.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestdomain.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestitemmarkingtype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestitemtype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptesttype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapwritinggenre.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnonschooleducation.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsoperationalstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspermanentresidentstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspermissioncategorycode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspersonalisedplan.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspicturesource.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspnpcode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspreprimaryhours.json create mode 100644 
solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsprogramfundingsourcecode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsprogresslevel.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspublicschoolcatchmentstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsreceivinglocationofinstruction.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsrelationshiptostudent.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsresourceusagecontenttype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsscheduledactivitytype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolcoedstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschooleducationleveltype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolenrollmenttype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolfocuscode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoollevel.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoollocation.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolsectorcode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolsystem.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssessiontype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssexcode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssourcecodetype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstaffactivity.json create mode 
100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstaffstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstandardaustralianclassificationofcountriessacc.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstateterritorycode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstudentfamilyprogramtype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssuspensioncategory.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssystemicstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsteachercovercredit.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsteachercoversupervision.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetstelephonenumbertype.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetstravelmode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsvisastudyentitlement.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsvisasubclass.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingalertcategory.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingappealstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristiccategory.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristicclassification.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristicsubcategory.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventcategoryclass.json create mode 100644 
solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventlocation.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventtimeperiod.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingresponsecategory.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingstatus.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsyearlevelcode.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsyesornocategory.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_definedprotocols.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_genericyesno.json create mode 100644 solution/Synapse/Patterns/database/siflake/table/ref_iso4217currencynamesandcodeelements.json create mode 100644 solution/Synapse/Patterns/notebook/sif/SIFDimCalendarDate.ipynb create mode 100644 solution/Synapse/Patterns/notebook/sif/SIFDimLoadTeachingGroup.ipynb diff --git a/solution/SampleFiles/sif/SifOpenApi/GenerateScripts.ps1 b/solution/SampleFiles/sif/SifOpenApi/GenerateScripts.ps1 new file mode 100644 index 00000000..ae4112d8 --- /dev/null +++ b/solution/SampleFiles/sif/SifOpenApi/GenerateScripts.ps1 @@ -0,0 +1,77 @@ + + +$r = Get-Content("./jsonSchemaCreate_AU.json") | ConvertFrom-Json +$template = ( (Get-Content -Path ./LakeDbTableTemplate.json).Replace("{StorageContainer}",$tout.synapse_lakedatabase_container_name).Replace("{StorageAccount}",$tout.adlsstorage_name).Replace("{SynapseWorkSpace}",$tout.synapse_workspace_name) ) +$alldataitems = [System.Collections.ArrayList]::new() + + +foreach ($prop in $r.definitions | Get-Member) +{ + if ($prop.MemberType -eq "NoteProperty") + { + $add = $alldataitems.Add($prop) + $propdetail = $r.definitions | Select-Object -Property $prop.Name -ExpandProperty $prop.Name + $OneOfCheck = $propdetail.oneOf + 
if(![string]::IsNullOrEmpty($OneOfCheck)) + { + $entity = "ref_" + $prop.Name.ToLower() + #Write-Host "PersistRefData(""$entity"", dict1)" + #Set-Content -Path "../../../Synapse/Patterns/database/siflake/table/$entity.json" -Value $template.Replace("{EntityName}",$entity) + } + + } + + + +} + +$sqlcommand = @" +Select tbl.name TableName, c.name ColName, t.name as ColType +from +sys.tables tbl +inner join sys.columns c on c.object_id = tbl.object_id +inner join sys.types t on c.System_Type_Id = t.System_Type_Id +--where c.object_id = OBJECT_ID('{TableName}') +order by TableName DESC +--for json path +"@ + +$coltemplate = @" +{ + "Name": "{Name}", + "OriginDataTypeName": { + "TypeName": "{Type}", + "IsComplexType": false, + "IsNullable": true, + "Length": {Length}, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } +} +"@ + +$token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) +$Columns= Invoke-Sqlcmd -ServerInstance ($tout.synapse_workspace_name + "-ondemand.sql.azuresynapse.net") -Database "sif" -AccessToken $token -query $sqlcommand +$Entities = $Columns | Select-Object -Property "TableName" -unique + +foreach ($e in $Entities) +{ + $cols = @() + $ecols = $Columns | Where-Object {$_.TableName -eq $e.TableName} + foreach ($ecol in $ecols) + { + $type = "string" + $length = 8000 + if($ecol.ColType -eq "varchar") {$type = "string"} + if($ecol.ColType -eq "float") {$type = "float"} + if($ecol.ColType -eq "bigint") + { + $type = "integer" + $length = 0 + } + $cols += ($coltemplate.Replace("{Name}",$ecol.ColName).Replace("{Type}",$type).Replace("{Length}",$length) | ConvertFrom-Json) + } + $cols + +} diff --git a/solution/SampleFiles/sif/SifOpenApi/LakeDbTableTemplate.json b/solution/SampleFiles/sif/SifOpenApi/LakeDbTableTemplate.json new file mode 100644 index 00000000..3a415d47 --- /dev/null +++ b/solution/SampleFiles/sif/SifOpenApi/LakeDbTableTemplate.json @@ -0,0 +1,78 @@ +{ + "name": "{EntityName}", + 
"properties": { + "Name": "{EntityName}", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://{StorageContainer}@{StorageAccount}.dfs.core.windows.net/synapse/workspaces/{SynapseWorkSpace}/warehouse/sif.db/{EntityName}", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://{StorageContainer}@{StorageAccount}.dfs.core.windows.net/synapse/workspaces/{SynapseWorkSpace}/warehouse/sif.db/{EntityName}", + "Properties": { + "LinkedServiceName": "{SynapseWorkSpace}-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} \ No newline at end of file diff --git 
a/solution/SampleFiles/sif/SifOpenApi/test.ps1 b/solution/SampleFiles/sif/SifOpenApi/test.ps1 deleted file mode 100644 index 41a71a4c..00000000 --- a/solution/SampleFiles/sif/SifOpenApi/test.ps1 +++ /dev/null @@ -1,20 +0,0 @@ -$r = Get-Content("./jsonSchemaCreate_AU.json") | ConvertFrom-Json -$alldataitems = [System.Collections.ArrayList]::new() -foreach ($prop in $r.definitions | Get-Member) -{ - if ($prop.MemberType -eq "NoteProperty") - { - $add = $alldataitems.Add($prop) - $propdetail = $r.definitions | Select-Object -Property $prop.Name -ExpandProperty $prop.Name - $OneOfCheck = $propdetail.oneOf - if(![string]::IsNullOrEmpty($OneOfCheck)) - { - $entity = $prop.Name - Write-Host "PersistRefData(""$entity"", dict1)" - } - - } - - - -} \ No newline at end of file diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_abstractcontentelement.json b/solution/Synapse/Patterns/database/siflake/table/ref_abstractcontentelement.json new file mode 100644 index 00000000..709aaf0a --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_abstractcontentelement.json @@ -0,0 +1,78 @@ +{ + "name": "ref_abstractcontentelement", + "properties": { + "Name": "ref_abstractcontentelement", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": 
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_abstractcontentelement", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_abstractcontentelement", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesets0211programavailability.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesets0211programavailability.json new file mode 100644 index 00000000..07f357e6 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesets0211programavailability.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesets0211programavailability", + "properties": { + "Name": "ref_aucodesets0211programavailability", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + 
"IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesets0211programavailability", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesets0211programavailability", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesets0792identificationprocedure.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesets0792identificationprocedure.json new file mode 100644 index 00000000..c78dba1b --- /dev/null +++ 
b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesets0792identificationprocedure.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesets0792identificationprocedure", + "properties": { + "Name": "ref_aucodesets0792identificationprocedure", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesets0792identificationprocedure", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesets0792identificationprocedure", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + 
"DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaccompaniment.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaccompaniment.json new file mode 100644 index 00000000..d05edce0 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaccompaniment.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsaccompaniment", + "properties": { + "Name": "ref_aucodesetsaccompaniment", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaccompaniment", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaccompaniment", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsacstrand.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsacstrand.json new file mode 100644 index 00000000..28e2a4af --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsacstrand.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsacstrand", + "properties": { + "Name": "ref_aucodesetsacstrand", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": 
"parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsacstrand", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsacstrand", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsactivityinvolvementcode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsactivityinvolvementcode.json new file mode 100644 index 00000000..e93abbe9 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsactivityinvolvementcode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsactivityinvolvementcode", + "properties": { + "Name": "ref_aucodesetsactivityinvolvementcode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + 
"HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsactivityinvolvementcode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsactivityinvolvementcode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsactivitytype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsactivitytype.json new file mode 100644 index 00000000..11ecc980 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsactivitytype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsactivitytype", + 
"properties": { + "Name": "ref_aucodesetsactivitytype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsactivitytype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsactivitytype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} 
diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaddressrole.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaddressrole.json new file mode 100644 index 00000000..bfb4a28c --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaddressrole.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsaddressrole", + "properties": { + "Name": "ref_aucodesetsaddressrole", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaddressrole", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaddressrole", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + 
"derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaddresstype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaddresstype.json new file mode 100644 index 00000000..a2c1c409 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaddresstype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsaddresstype", + "properties": { + "Name": "ref_aucodesetsaddresstype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaddresstype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + 
"Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaddresstype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagcollection.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagcollection.json new file mode 100644 index 00000000..bf974614 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagcollection.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsagcollection", + "properties": { + "Name": "ref_aucodesetsagcollection", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": 
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsagcollection", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsagcollection", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagcontextquestion.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagcontextquestion.json new file mode 100644 index 00000000..36771e31 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagcontextquestion.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsagcontextquestion", + "properties": { + "Name": "ref_aucodesetsagcontextquestion", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + 
"IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsagcontextquestion", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsagcontextquestion", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagsubmissionstatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagsubmissionstatus.json new file mode 100644 index 00000000..53cc14d8 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsagsubmissionstatus.json @@ -0,0 
+1,78 @@ +{ + "name": "ref_aucodesetsagsubmissionstatus", + "properties": { + "Name": "ref_aucodesetsagsubmissionstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsagsubmissionstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsagsubmissionstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, 
+ "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsassessmentreportingmethod.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsassessmentreportingmethod.json new file mode 100644 index 00000000..e3e07d34 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsassessmentreportingmethod.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsassessmentreportingmethod", + "properties": { + "Name": "ref_aucodesetsassessmentreportingmethod", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsassessmentreportingmethod", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsassessmentreportingmethod", + "Properties": { + "LinkedServiceName": 
"adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsassessmenttype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsassessmenttype.json new file mode 100644 index 00000000..d7641ba3 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsassessmenttype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsassessmenttype", + "properties": { + "Name": "ref_aucodesetsassessmenttype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsassessmenttype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsassessmenttype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsattendancecode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsattendancecode.json new file mode 100644 index 00000000..34235f44 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsattendancecode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsattendancecode", + "properties": { + "Name": "ref_aucodesetsattendancecode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + 
"Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsattendancecode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsattendancecode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsattendancestatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsattendancestatus.json new file mode 100644 index 00000000..5058d0c7 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsattendancestatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsattendancestatus", + "properties": { + "Name": "ref_aucodesetsattendancestatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": 
"EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsattendancestatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsattendancestatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustraliancitizenshipstatus.json 
b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustraliancitizenshipstatus.json new file mode 100644 index 00000000..2564a818 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustraliancitizenshipstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsaustraliancitizenshipstatus", + "properties": { + "Name": "ref_aucodesetsaustraliancitizenshipstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustraliancitizenshipstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustraliancitizenshipstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + 
"derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationofculturalandethnicgroupsascceg.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationofculturalandethnicgroupsascceg.json new file mode 100644 index 00000000..10fc7797 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationofculturalandethnicgroupsascceg.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsaustralianstandardclassificationofculturalandethnicgroupsascceg", + "properties": { + "Name": "ref_aucodesetsaustralianstandardclassificationofculturalandethnicgroupsascceg", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + 
"Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustralianstandardclassificationofculturalandethnicgroupsascceg", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustralianstandardclassificationofculturalandethnicgroupsascceg", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationoflanguagesascl.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationoflanguagesascl.json new file mode 100644 index 00000000..71addd5d --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationoflanguagesascl.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsaustralianstandardclassificationoflanguagesascl", + "properties": { + "Name": "ref_aucodesetsaustralianstandardclassificationoflanguagesascl", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + 
"OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustralianstandardclassificationoflanguagesascl", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustralianstandardclassificationoflanguagesascl", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationofreligiousgroupsascrg.json 
b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationofreligiousgroupsascrg.json new file mode 100644 index 00000000..51f6af79 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardclassificationofreligiousgroupsascrg.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsaustralianstandardclassificationofreligiousgroupsascrg", + "properties": { + "Name": "ref_aucodesetsaustralianstandardclassificationofreligiousgroupsascrg", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustralianstandardclassificationofreligiousgroupsascrg", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustralianstandardclassificationofreligiousgroupsascrg", + "Properties": { + "LinkedServiceName": 
"adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardgeographicalclassificationasgc.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardgeographicalclassificationasgc.json new file mode 100644 index 00000000..28b9f791 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustralianstandardgeographicalclassificationasgc.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsaustralianstandardgeographicalclassificationasgc", + "properties": { + "Name": "ref_aucodesetsaustralianstandardgeographicalclassificationasgc", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": 
"parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustralianstandardgeographicalclassificationasgc", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustralianstandardgeographicalclassificationasgc", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustraliantimezone.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustraliantimezone.json new file mode 100644 index 00000000..20bd43ae --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsaustraliantimezone.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsaustraliantimezone", + "properties": { + "Name": "ref_aucodesetsaustraliantimezone", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": 
true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustraliantimezone", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsaustraliantimezone", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsbirthdateverification.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsbirthdateverification.json new file mode 100644 index 00000000..b6238dda --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsbirthdateverification.json @@ -0,0 
+1,78 @@ +{ + "name": "ref_aucodesetsbirthdateverification", + "properties": { + "Name": "ref_aucodesetsbirthdateverification", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsbirthdateverification", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsbirthdateverification", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + 
"Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsboarding.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsboarding.json new file mode 100644 index 00000000..6b5bc9b1 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsboarding.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsboarding", + "properties": { + "Name": "ref_aucodesetsboarding", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsboarding", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsboarding", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + 
"textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetscalendarevent.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetscalendarevent.json new file mode 100644 index 00000000..893c52ec --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetscalendarevent.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetscalendarevent", + "properties": { + "Name": "ref_aucodesetscalendarevent", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetscalendarevent", + 
"FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetscalendarevent", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetscontactmethod.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetscontactmethod.json new file mode 100644 index 00000000..b09ea1bd --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetscontactmethod.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetscontactmethod", + "properties": { + "Name": "ref_aucodesetscontactmethod", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": 
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetscontactmethod", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetscontactmethod", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdayvaluecode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdayvaluecode.json new file mode 100644 index 00000000..21be4942 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdayvaluecode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsdayvaluecode", + "properties": { + "Name": "ref_aucodesetsdayvaluecode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + 
"TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsdayvaluecode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsdayvaluecode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdetentioncategory.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdetentioncategory.json new file mode 100644 index 00000000..08813cc3 --- /dev/null +++ 
b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdetentioncategory.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsdetentioncategory", + "properties": { + "Name": "ref_aucodesetsdetentioncategory", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsdetentioncategory", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsdetentioncategory", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": 
"{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdwellingarrangement.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdwellingarrangement.json new file mode 100644 index 00000000..e1773cdf --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsdwellingarrangement.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsdwellingarrangement", + "properties": { + "Name": "ref_aucodesetsdwellingarrangement", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsdwellingarrangement", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsdwellingarrangement", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseducationagencytype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseducationagencytype.json new file mode 100644 index 00000000..2237bcd2 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseducationagencytype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetseducationagencytype", + "properties": { + "Name": "ref_aucodesetseducationagencytype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": 
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetseducationagencytype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetseducationagencytype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseducationlevel.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseducationlevel.json new file mode 100644 index 00000000..078d2dfd --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseducationlevel.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetseducationlevel", + "properties": { + "Name": "ref_aucodesetseducationlevel", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + 
"IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetseducationlevel", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetseducationlevel", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetselectronicidtype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetselectronicidtype.json new file mode 100644 index 00000000..730154ec --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetselectronicidtype.json @@ -0,0 +1,78 @@ +{ 
+ "name": "ref_aucodesetselectronicidtype", + "properties": { + "Name": "ref_aucodesetselectronicidtype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetselectronicidtype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetselectronicidtype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": 
false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsemailtype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsemailtype.json new file mode 100644 index 00000000..0b039b22 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsemailtype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsemailtype", + "properties": { + "Name": "ref_aucodesetsemailtype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsemailtype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsemailtype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": 
",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsemploymenttype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsemploymenttype.json new file mode 100644 index 00000000..9f36fc22 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsemploymenttype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsemploymenttype", + "properties": { + "Name": "ref_aucodesetsemploymenttype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsemploymenttype", + "FormatTypeSetToDatabaseDefault": false + } + 
}, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsemploymenttype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsenglishproficiency.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsenglishproficiency.json new file mode 100644 index 00000000..bbc0115e --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsenglishproficiency.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsenglishproficiency", + "properties": { + "Name": "ref_aucodesetsenglishproficiency", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + 
"OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsenglishproficiency", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsenglishproficiency", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsenrollmenttimeframe.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsenrollmenttimeframe.json new file mode 100644 index 00000000..7ffefd98 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsenrollmenttimeframe.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsenrollmenttimeframe", + "properties": { + "Name": "ref_aucodesetsenrollmenttimeframe", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", 
+ "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsenrollmenttimeframe", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsenrollmenttimeframe", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsentrytype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsentrytype.json new file mode 100644 index 00000000..9a860e8c --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsentrytype.json @@ 
-0,0 +1,78 @@ +{ + "name": "ref_aucodesetsentrytype", + "properties": { + "Name": "ref_aucodesetsentrytype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsentrytype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsentrytype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + 
"IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsequipmenttype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsequipmenttype.json new file mode 100644 index 00000000..5c8225fa --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsequipmenttype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsequipmenttype", + "properties": { + "Name": "ref_aucodesetsequipmenttype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsequipmenttype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsequipmenttype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + 
"textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseventcategory.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseventcategory.json new file mode 100644 index 00000000..63fae4a7 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseventcategory.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetseventcategory", + "properties": { + "Name": "ref_aucodesetseventcategory", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetseventcategory", + 
"FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetseventcategory", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseventsubcategory.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseventsubcategory.json new file mode 100644 index 00000000..bed3c3da --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetseventsubcategory.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetseventsubcategory", + "properties": { + "Name": "ref_aucodesetseventsubcategory", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": 
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetseventsubcategory", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetseventsubcategory", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsexitwithdrawalstatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsexitwithdrawalstatus.json new file mode 100644 index 00000000..74573b1d --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsexitwithdrawalstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsexitwithdrawalstatus", + "properties": { + "Name": "ref_aucodesetsexitwithdrawalstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + 
"Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsexitwithdrawalstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsexitwithdrawalstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsexitwithdrawaltype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsexitwithdrawaltype.json new file mode 100644 index 00000000..eb83a64e --- /dev/null 
+++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsexitwithdrawaltype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsexitwithdrawaltype", + "properties": { + "Name": "ref_aucodesetsexitwithdrawaltype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsexitwithdrawaltype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsexitwithdrawaltype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": 
"{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsfederalelectorate.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsfederalelectorate.json new file mode 100644 index 00000000..38f15c11 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsfederalelectorate.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsfederalelectorate", + "properties": { + "Name": "ref_aucodesetsfederalelectorate", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsfederalelectorate", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsfederalelectorate", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsffposstatuscode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsffposstatuscode.json new file mode 100644 index 00000000..d6f7da24 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsffposstatuscode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsffposstatuscode", + "properties": { + "Name": "ref_aucodesetsffposstatuscode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": 
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsffposstatuscode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsffposstatuscode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsftptstatuscode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsftptstatuscode.json new file mode 100644 index 00000000..d0a4bdb5 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsftptstatuscode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsftptstatuscode", + "properties": { + "Name": "ref_aucodesetsftptstatuscode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, 
+ "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsftptstatuscode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsftptstatuscode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsgroupcategorycode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsgroupcategorycode.json new file mode 100644 index 00000000..33998e6b --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsgroupcategorycode.json @@ -0,0 +1,78 @@ +{ + "name": 
"ref_aucodesetsgroupcategorycode", + "properties": { + "Name": "ref_aucodesetsgroupcategorycode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsgroupcategorycode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsgroupcategorycode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + 
"IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsimmunisationcertificatestatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsimmunisationcertificatestatus.json new file mode 100644 index 00000000..e66811ab --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsimmunisationcertificatestatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsimmunisationcertificatestatus", + "properties": { + "Name": "ref_aucodesetsimmunisationcertificatestatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsimmunisationcertificatestatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsimmunisationcertificatestatus", + "Properties": { + "LinkedServiceName": 
"adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsindigenousstatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsindigenousstatus.json new file mode 100644 index 00000000..9b1a7af2 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsindigenousstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsindigenousstatus", + "properties": { + "Name": "ref_aucodesetsindigenousstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsindigenousstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsindigenousstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetslanguagetype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetslanguagetype.json new file mode 100644 index 00000000..beb53360 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetslanguagetype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetslanguagetype", + "properties": { + "Name": "ref_aucodesetslanguagetype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + 
"Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetslanguagetype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetslanguagetype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetslearningstandarditemrelationshiptypes.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetslearningstandarditemrelationshiptypes.json new file mode 100644 index 00000000..db4e0f21 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetslearningstandarditemrelationshiptypes.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetslearningstandarditemrelationshiptypes", + "properties": { + "Name": "ref_aucodesetslearningstandarditemrelationshiptypes", + "EntityType": 
"TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetslearningstandarditemrelationshiptypes", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetslearningstandarditemrelationshiptypes", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git 
a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsmaritalstatusaihw.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsmaritalstatusaihw.json new file mode 100644 index 00000000..853d1305 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsmaritalstatusaihw.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsmaritalstatusaihw", + "properties": { + "Name": "ref_aucodesetsmaritalstatusaihw", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsmaritalstatusaihw", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsmaritalstatusaihw", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + 
"compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsmediumofinstruction.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsmediumofinstruction.json new file mode 100644 index 00000000..faad1e2c --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsmediumofinstruction.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsmediumofinstruction", + "properties": { + "Name": "ref_aucodesetsmediumofinstruction", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsmediumofinstruction", + 
"FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsmediumofinstruction", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnameusagetype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnameusagetype.json new file mode 100644 index 00000000..4883add1 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnameusagetype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsnameusagetype", + "properties": { + "Name": "ref_aucodesetsnameusagetype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": 
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnameusagetype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnameusagetype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapjurisdiction.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapjurisdiction.json new file mode 100644 index 00000000..6ec0a032 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapjurisdiction.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsnapjurisdiction", + "properties": { + "Name": "ref_aucodesetsnapjurisdiction", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + 
"OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnapjurisdiction", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnapjurisdiction", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapparticipationcode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapparticipationcode.json new file mode 100644 index 00000000..aa6f2beb --- /dev/null +++ 
b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapparticipationcode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsnapparticipationcode", + "properties": { + "Name": "ref_aucodesetsnapparticipationcode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnapparticipationcode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnapparticipationcode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": 
"{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapresponsecorrectness.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapresponsecorrectness.json new file mode 100644 index 00000000..eefc7abc --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapresponsecorrectness.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsnapresponsecorrectness", + "properties": { + "Name": "ref_aucodesetsnapresponsecorrectness", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnapresponsecorrectness", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnapresponsecorrectness", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestdomain.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestdomain.json new file mode 100644 index 00000000..65124c9f --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestdomain.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsnaptestdomain", + "properties": { + "Name": "ref_aucodesetsnaptestdomain", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": 
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnaptestdomain", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnaptestdomain", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestitemmarkingtype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestitemmarkingtype.json new file mode 100644 index 00000000..5ea6e75d --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestitemmarkingtype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsnaptestitemmarkingtype", + "properties": { + "Name": "ref_aucodesetsnaptestitemmarkingtype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + 
"IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnaptestitemmarkingtype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnaptestitemmarkingtype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestitemtype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestitemtype.json new file mode 100644 index 00000000..c1c2ca17 --- /dev/null +++ 
b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptestitemtype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsnaptestitemtype", + "properties": { + "Name": "ref_aucodesetsnaptestitemtype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnaptestitemtype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnaptestitemtype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + 
"PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptesttype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptesttype.json new file mode 100644 index 00000000..cfdeaf5b --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnaptesttype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsnaptesttype", + "properties": { + "Name": "ref_aucodesetsnaptesttype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnaptesttype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnaptesttype", + "Properties": { + "LinkedServiceName": 
"adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapwritinggenre.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapwritinggenre.json new file mode 100644 index 00000000..bf43cd83 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnapwritinggenre.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsnapwritinggenre", + "properties": { + "Name": "ref_aucodesetsnapwritinggenre", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnapwritinggenre", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnapwritinggenre", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnonschooleducation.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnonschooleducation.json new file mode 100644 index 00000000..49fb72cb --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsnonschooleducation.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsnonschooleducation", + "properties": { + "Name": "ref_aucodesetsnonschooleducation", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + 
"IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnonschooleducation", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsnonschooleducation", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsoperationalstatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsoperationalstatus.json new file mode 100644 index 00000000..af59c29c --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsoperationalstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsoperationalstatus", + "properties": { + "Name": "ref_aucodesetsoperationalstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + 
"Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsoperationalstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsoperationalstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspermanentresidentstatus.json 
b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspermanentresidentstatus.json new file mode 100644 index 00000000..92df7c0c --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspermanentresidentstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetspermanentresidentstatus", + "properties": { + "Name": "ref_aucodesetspermanentresidentstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspermanentresidentstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspermanentresidentstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": 
"{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspermissioncategorycode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspermissioncategorycode.json new file mode 100644 index 00000000..0cd1c1b8 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspermissioncategorycode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetspermissioncategorycode", + "properties": { + "Name": "ref_aucodesetspermissioncategorycode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspermissioncategorycode", + "FormatTypeSetToDatabaseDefault": false + } + }, + 
"Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspermissioncategorycode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspersonalisedplan.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspersonalisedplan.json new file mode 100644 index 00000000..8f4d6a52 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspersonalisedplan.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetspersonalisedplan", + "properties": { + "Name": "ref_aucodesetspersonalisedplan", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + 
"OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspersonalisedplan", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspersonalisedplan", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspicturesource.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspicturesource.json new file mode 100644 index 00000000..a1cf7886 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspicturesource.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetspicturesource", + "properties": { + "Name": "ref_aucodesetspicturesource", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + 
"IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspicturesource", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspicturesource", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspnpcode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspnpcode.json new file mode 100644 index 00000000..b2c43f04 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspnpcode.json @@ -0,0 +1,78 @@ +{ + "name": 
"ref_aucodesetspnpcode", + "properties": { + "Name": "ref_aucodesetspnpcode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspnpcode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspnpcode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": 
"TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspreprimaryhours.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspreprimaryhours.json new file mode 100644 index 00000000..fe8fc221 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspreprimaryhours.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetspreprimaryhours", + "properties": { + "Name": "ref_aucodesetspreprimaryhours", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspreprimaryhours", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspreprimaryhours", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + 
"compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsprogramfundingsourcecode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsprogramfundingsourcecode.json new file mode 100644 index 00000000..9d69d204 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsprogramfundingsourcecode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsprogramfundingsourcecode", + "properties": { + "Name": "ref_aucodesetsprogramfundingsourcecode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsprogramfundingsourcecode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsprogramfundingsourcecode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsprogresslevel.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsprogresslevel.json new file mode 100644 index 00000000..4f322365 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsprogresslevel.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsprogresslevel", + "properties": { + "Name": "ref_aucodesetsprogresslevel", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + 
"IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsprogresslevel", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsprogresslevel", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspublicschoolcatchmentstatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspublicschoolcatchmentstatus.json new file mode 100644 index 00000000..16534d1c --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetspublicschoolcatchmentstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetspublicschoolcatchmentstatus", + "properties": { + "Name": "ref_aucodesetspublicschoolcatchmentstatus", + "EntityType": "TABLE", + "Namespace": { 
+ "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspublicschoolcatchmentstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetspublicschoolcatchmentstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git 
a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsreceivinglocationofinstruction.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsreceivinglocationofinstruction.json new file mode 100644 index 00000000..d1a8d008 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsreceivinglocationofinstruction.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsreceivinglocationofinstruction", + "properties": { + "Name": "ref_aucodesetsreceivinglocationofinstruction", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsreceivinglocationofinstruction", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsreceivinglocationofinstruction", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + 
"LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsrelationshiptostudent.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsrelationshiptostudent.json new file mode 100644 index 00000000..d563e3f7 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsrelationshiptostudent.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsrelationshiptostudent", + "properties": { + "Name": "ref_aucodesetsrelationshiptostudent", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsrelationshiptostudent", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsrelationshiptostudent", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsresourceusagecontenttype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsresourceusagecontenttype.json new file mode 100644 index 00000000..cdfb1d0a --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsresourceusagecontenttype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsresourceusagecontenttype", + "properties": { + "Name": "ref_aucodesetsresourceusagecontenttype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + 
"TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsresourceusagecontenttype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsresourceusagecontenttype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsscheduledactivitytype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsscheduledactivitytype.json new file mode 100644 index 00000000..4d1f8015 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsscheduledactivitytype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsscheduledactivitytype", + "properties": { + "Name": "ref_aucodesetsscheduledactivitytype", 
+ "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsscheduledactivitytype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsscheduledactivitytype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git 
a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolcoedstatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolcoedstatus.json new file mode 100644 index 00000000..b734eb53 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolcoedstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsschoolcoedstatus", + "properties": { + "Name": "ref_aucodesetsschoolcoedstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoolcoedstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoolcoedstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": 
"", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschooleducationleveltype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschooleducationleveltype.json new file mode 100644 index 00000000..0714243e --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschooleducationleveltype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsschooleducationleveltype", + "properties": { + "Name": "ref_aucodesetsschooleducationleveltype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschooleducationleveltype", + 
"FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschooleducationleveltype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolenrollmenttype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolenrollmenttype.json new file mode 100644 index 00000000..29983a7d --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolenrollmenttype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsschoolenrollmenttype", + "properties": { + "Name": "ref_aucodesetsschoolenrollmenttype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": 
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoolenrollmenttype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoolenrollmenttype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolfocuscode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolfocuscode.json new file mode 100644 index 00000000..602edc2f --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolfocuscode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsschoolfocuscode", + "properties": { + "Name": "ref_aucodesetsschoolfocuscode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", 
+ "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoolfocuscode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoolfocuscode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoollevel.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoollevel.json new file mode 100644 index 00000000..b66b2ca8 --- /dev/null +++ 
b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoollevel.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsschoollevel", + "properties": { + "Name": "ref_aucodesetsschoollevel", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoollevel", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoollevel", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + 
"spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoollocation.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoollocation.json new file mode 100644 index 00000000..4caf21cf --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoollocation.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsschoollocation", + "properties": { + "Name": "ref_aucodesetsschoollocation", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoollocation", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoollocation", + "Properties": { + "LinkedServiceName": 
"adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolsectorcode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolsectorcode.json new file mode 100644 index 00000000..9ca3901a --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolsectorcode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsschoolsectorcode", + "properties": { + "Name": "ref_aucodesetsschoolsectorcode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoolsectorcode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoolsectorcode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolsystem.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolsystem.json new file mode 100644 index 00000000..5bce8e70 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsschoolsystem.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsschoolsystem", + "properties": { + "Name": "ref_aucodesetsschoolsystem", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + 
"Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoolsystem", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsschoolsystem", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssessiontype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssessiontype.json new file mode 100644 index 00000000..08af1be7 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssessiontype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetssessiontype", + "properties": { + "Name": "ref_aucodesetssessiontype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + 
"Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetssessiontype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetssessiontype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssexcode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssexcode.json new file mode 100644 index 
00000000..97dfc6b6 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssexcode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetssexcode", + "properties": { + "Name": "ref_aucodesetssexcode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetssexcode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetssexcode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + 
"PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssourcecodetype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssourcecodetype.json new file mode 100644 index 00000000..d7855eac --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssourcecodetype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetssourcecodetype", + "properties": { + "Name": "ref_aucodesetssourcecodetype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetssourcecodetype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetssourcecodetype", + "Properties": { + "LinkedServiceName": 
"adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstaffactivity.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstaffactivity.json new file mode 100644 index 00000000..5b834041 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstaffactivity.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsstaffactivity", + "properties": { + "Name": "ref_aucodesetsstaffactivity", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsstaffactivity", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsstaffactivity", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstaffstatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstaffstatus.json new file mode 100644 index 00000000..fcf29154 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstaffstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsstaffstatus", + "properties": { + "Name": "ref_aucodesetsstaffstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + 
"Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsstaffstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsstaffstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstandardaustralianclassificationofcountriessacc.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstandardaustralianclassificationofcountriessacc.json new file mode 100644 index 00000000..04f99e4f --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstandardaustralianclassificationofcountriessacc.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsstandardaustralianclassificationofcountriessacc", + "properties": { + "Name": 
"ref_aucodesetsstandardaustralianclassificationofcountriessacc", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsstandardaustralianclassificationofcountriessacc", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsstandardaustralianclassificationofcountriessacc", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, 
+ "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstateterritorycode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstateterritorycode.json new file mode 100644 index 00000000..6924aa28 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstateterritorycode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsstateterritorycode", + "properties": { + "Name": "ref_aucodesetsstateterritorycode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsstateterritorycode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsstateterritorycode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + 
"LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstudentfamilyprogramtype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstudentfamilyprogramtype.json new file mode 100644 index 00000000..530b2972 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsstudentfamilyprogramtype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsstudentfamilyprogramtype", + "properties": { + "Name": "ref_aucodesetsstudentfamilyprogramtype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsstudentfamilyprogramtype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsstudentfamilyprogramtype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssuspensioncategory.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssuspensioncategory.json new file mode 100644 index 00000000..9ad61175 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssuspensioncategory.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetssuspensioncategory", + "properties": { + "Name": "ref_aucodesetssuspensioncategory", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + 
"IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetssuspensioncategory", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetssuspensioncategory", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssystemicstatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssystemicstatus.json new file mode 100644 index 00000000..232ee89b --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetssystemicstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetssystemicstatus", + "properties": { + "Name": "ref_aucodesetssystemicstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" 
+ }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetssystemicstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetssystemicstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsteachercovercredit.json 
b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsteachercovercredit.json new file mode 100644 index 00000000..1d09196e --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsteachercovercredit.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsteachercovercredit", + "properties": { + "Name": "ref_aucodesetsteachercovercredit", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsteachercovercredit", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsteachercovercredit", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + 
"Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsteachercoversupervision.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsteachercoversupervision.json new file mode 100644 index 00000000..5991b1ce --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsteachercoversupervision.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsteachercoversupervision", + "properties": { + "Name": "ref_aucodesetsteachercoversupervision", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsteachercoversupervision", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + 
"Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsteachercoversupervision", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetstelephonenumbertype.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetstelephonenumbertype.json new file mode 100644 index 00000000..35fab5e0 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetstelephonenumbertype.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetstelephonenumbertype", + "properties": { + "Name": "ref_aucodesetstelephonenumbertype", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": 
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetstelephonenumbertype", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetstelephonenumbertype", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetstravelmode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetstravelmode.json new file mode 100644 index 00000000..0790f8bf --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetstravelmode.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetstravelmode", + "properties": { + "Name": "ref_aucodesetstravelmode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 
8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetstravelmode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetstravelmode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsvisastudyentitlement.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsvisastudyentitlement.json new file mode 100644 index 00000000..4e37b1b6 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsvisastudyentitlement.json @@ -0,0 +1,78 @@ +{ + "name": 
"ref_aucodesetsvisastudyentitlement", + "properties": { + "Name": "ref_aucodesetsvisastudyentitlement", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsvisastudyentitlement", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsvisastudyentitlement", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": 
false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsvisasubclass.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsvisasubclass.json new file mode 100644 index 00000000..17fdf285 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsvisasubclass.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsvisasubclass", + "properties": { + "Name": "ref_aucodesetsvisasubclass", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsvisasubclass", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsvisasubclass", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + 
"textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingalertcategory.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingalertcategory.json new file mode 100644 index 00000000..e6f4efb3 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingalertcategory.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetswellbeingalertcategory", + "properties": { + "Name": "ref_aucodesetswellbeingalertcategory", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingalertcategory", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingalertcategory", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingappealstatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingappealstatus.json new file mode 100644 index 00000000..6f53774c --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingappealstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetswellbeingappealstatus", + "properties": { + "Name": "ref_aucodesetswellbeingappealstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", 
+ "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingappealstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingappealstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristiccategory.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristiccategory.json new file mode 100644 index 00000000..e3f8b180 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristiccategory.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetswellbeingcharacteristiccategory", + "properties": { + "Name": 
"ref_aucodesetswellbeingcharacteristiccategory", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingcharacteristiccategory", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingcharacteristiccategory", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": 
false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristicclassification.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristicclassification.json new file mode 100644 index 00000000..8051cdbc --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristicclassification.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetswellbeingcharacteristicclassification", + "properties": { + "Name": "ref_aucodesetswellbeingcharacteristicclassification", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingcharacteristicclassification", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingcharacteristicclassification", + "Properties": { + 
"LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristicsubcategory.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristicsubcategory.json new file mode 100644 index 00000000..eda6b279 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingcharacteristicsubcategory.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetswellbeingcharacteristicsubcategory", + "properties": { + "Name": "ref_aucodesetswellbeingcharacteristicsubcategory", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": 
"org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingcharacteristicsubcategory", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingcharacteristicsubcategory", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventcategoryclass.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventcategoryclass.json new file mode 100644 index 00000000..6295d8db --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventcategoryclass.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetswellbeingeventcategoryclass", + "properties": { + "Name": "ref_aucodesetswellbeingeventcategoryclass", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + 
"Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingeventcategoryclass", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingeventcategoryclass", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventlocation.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventlocation.json new file mode 100644 index 00000000..a700df54 --- /dev/null +++ 
b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventlocation.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetswellbeingeventlocation", + "properties": { + "Name": "ref_aucodesetswellbeingeventlocation", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingeventlocation", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingeventlocation", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": 
"{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventtimeperiod.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventtimeperiod.json new file mode 100644 index 00000000..eb40a400 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingeventtimeperiod.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetswellbeingeventtimeperiod", + "properties": { + "Name": "ref_aucodesetswellbeingeventtimeperiod", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingeventtimeperiod", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": 
"abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingeventtimeperiod", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingresponsecategory.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingresponsecategory.json new file mode 100644 index 00000000..4b2562e8 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingresponsecategory.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetswellbeingresponsecategory", + "properties": { + "Name": "ref_aucodesetswellbeingresponsecategory", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + 
"OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingresponsecategory", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingresponsecategory", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingstatus.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingstatus.json new file mode 100644 index 00000000..b8c72e29 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetswellbeingstatus.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetswellbeingstatus", + "properties": { + "Name": "ref_aucodesetswellbeingstatus", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + 
"IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingstatus", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetswellbeingstatus", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsyearlevelcode.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsyearlevelcode.json new file mode 100644 index 00000000..bafb3e1b --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsyearlevelcode.json 
@@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsyearlevelcode", + "properties": { + "Name": "ref_aucodesetsyearlevelcode", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsyearlevelcode", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsyearlevelcode", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + 
"Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsyesornocategory.json b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsyesornocategory.json new file mode 100644 index 00000000..69dfe19f --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_aucodesetsyesornocategory.json @@ -0,0 +1,78 @@ +{ + "name": "ref_aucodesetsyesornocategory", + "properties": { + "Name": "ref_aucodesetsyesornocategory", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsyesornocategory", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_aucodesetsyesornocategory", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + 
"Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_definedprotocols.json b/solution/Synapse/Patterns/database/siflake/table/ref_definedprotocols.json new file mode 100644 index 00000000..646ad8aa --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_definedprotocols.json @@ -0,0 +1,78 @@ +{ + "name": "ref_definedprotocols", + "properties": { + "Name": "ref_definedprotocols", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_definedprotocols", + "FormatTypeSetToDatabaseDefault": false 
+ } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_definedprotocols", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_genericyesno.json b/solution/Synapse/Patterns/database/siflake/table/ref_genericyesno.json new file mode 100644 index 00000000..391144d3 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_genericyesno.json @@ -0,0 +1,78 @@ +{ + "name": "ref_genericyesno", + "properties": { + "Name": "ref_genericyesno", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + 
"FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_genericyesno", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_genericyesno", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/database/siflake/table/ref_iso4217currencynamesandcodeelements.json b/solution/Synapse/Patterns/database/siflake/table/ref_iso4217currencynamesandcodeelements.json new file mode 100644 index 00000000..f161b310 --- /dev/null +++ b/solution/Synapse/Patterns/database/siflake/table/ref_iso4217currencynamesandcodeelements.json @@ -0,0 +1,78 @@ +{ + "name": "ref_iso4217currencynamesandcodeelements", + "properties": { + "Name": "ref_iso4217currencynamesandcodeelements", + "EntityType": "TABLE", + "Namespace": { + "DatabaseName": "siflake" + }, + "Description": "", + "TableType": "EXTERNAL", + "Origin": { + "Type": "SPARK" + }, + "StorageDescriptor": { + "Columns": [ + { + "Name": "const", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + 
"HIVE_TYPE_STRING": "string" + } + } + }, + { + "Name": "title", + "OriginDataTypeName": { + "TypeName": "string", + "IsComplexType": false, + "IsNullable": true, + "Length": 8000, + "Properties": { + "HIVE_TYPE_STRING": "string" + } + } + } + ], + "Format": { + "InputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + "OutputFormat": "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + "FormatType": "parquet", + "SerializeLib": "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + "Properties": { + "path": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_iso4217currencynamesandcodeelements", + "FormatTypeSetToDatabaseDefault": false + } + }, + "Source": { + "Provider": "ADLS", + "Location": "abfss://adsdevdlsadsorgp@adsdevdlsadsorgpadsl.dfs.core.windows.net/synapse/workspaces/adsdevsynwadsorgp/warehouse/sif.db/ref_iso4217currencynamesandcodeelements", + "Properties": { + "LinkedServiceName": "adsdevsynwadsorgp-WorkspaceDefaultStorage", + "LocationSetToDatabaseDefault": false + } + }, + "Properties": { + "textinputformat.record.delimiter": ",", + "compression": "", + "derivedModelAttributeInfo": "{\"attributeReferences\":{}}" + }, + "Compressed": false, + "IsStoredAsSubdirectories": false + }, + "Properties": { + "Description": "", + "DisplayFolderInfo": "{\"name\":\"Others\",\"colorCode\":\"\"}", + "PrimaryKeys": "", + "spark.sql.sources.provider": "parquet" + }, + "Retention": 0, + "Temporary": false, + "IsRewriteEnabled": false + }, + "type": "TABLE" +} diff --git a/solution/Synapse/Patterns/notebook/sif/SIFDimCalendarDate.ipynb b/solution/Synapse/Patterns/notebook/sif/SIFDimCalendarDate.ipynb new file mode 100644 index 00000000..f1c64367 --- /dev/null +++ b/solution/Synapse/Patterns/notebook/sif/SIFDimCalendarDate.ipynb @@ -0,0 +1,213 @@ +{ + "nbformat": 4, + "nbformat_minor": 2, + "cells": [ + { + "cell_type": "code", + 
"execution_count": 10, + "outputs": [], + "metadata": {}, + "source": [ + "TaskObject = \" \\\r\n", + "{ \\\"TaskInstanceId\\\":53, \\\r\n", + " \\\"TaskMasterId\\\":1, \\\r\n", + " \\\"TaskStatus\\\":\\\"InProgress\\\",\\\r\n", + " \\\"TaskType\\\":\\\"Execute Synapse Notebook\\\",\\\r\n", + " \\\"Enabled\\\":1, \\\r\n", + " \\\"ExecutionUid\\\": \\\"dc872650-b992-4cae-9ae2-c714c95563ee\\\", \\\r\n", + " \\\"NumberOfRetries\\\":2, \\\r\n", + " \\\"DegreeOfCopyParallelism\\\":1, \\\r\n", + " \\\"KeyVaultBaseUrl\\\":\\\"https://ads-dev-kv-ads-orgp.vault.azure.net\\\", \\\r\n", + " \\\"ScheduleMasterId\\\":\\\"-4\\\", \\\r\n", + " \\\"TaskGroupConcurrency\\\":\\\"10\\\", \\\r\n", + " \\\"TaskGroupPriority\\\":0, \\\r\n", + " \\\"TaskExecutionType\\\":\\\"ADF\\\", \\\r\n", + " \\\"Source\\\": {\\\r\n", + " \\\"System\\\": {\\\r\n", + " \\\"SystemId\\\": -10,\\\r\n", + " \\\"SystemServer\\\": \\\"adsdevsynwadsorgp.sql.azuresynapse.net\\\",\\\r\n", + " \\\"AuthenticationType\\\": \\\"MSI\\\",\\\r\n", + " \\\"Type\\\": \\\"Azure Synapse\\\",\\\r\n", + " \\\"Username\\\": null,\\\r\n", + " \\\"Workspace\\\": \\\"adsdevsynwadsorgp\\\",\\\r\n", + " \\\"Database\\\": \\\"adsdevsyndpads\\\"\\\r\n", + " },\\\r\n", + " \\\"Instance\\\":{ \\\r\n", + " \\\"SourceRelativePath\\\":\\\"\\\", \\\r\n", + " \\\"TargetRelativePath\\\":\\\"\\\" \\\r\n", + " }, \\\r\n", + " \\\"DataFileName\\\":\\\"\\\", \\\r\n", + " \\\"RelativePath\\\":\\\"\\\", \\\r\n", + " \\\"SchemaFileName\\\":\\\"\\\", \\\r\n", + " \\\"Type\\\":\\\"Notebook-Optional\\\", \\\r\n", + " \\\"WriteSchemaToPurview\\\":\\\"Disabled\\\" \\\r\n", + " }, \\\r\n", + " \\\"Target\\\":{ \\\r\n", + " \\\"System\\\": {\\\r\n", + " \\\"SystemId\\\": -10,\\\r\n", + " \\\"SystemServer\\\": \\\"adsdevsynwadsorgp.sql.azuresynapse.net\\\",\\\r\n", + " \\\"AuthenticationType\\\": \\\"MSI\\\",\\\r\n", + " \\\"Type\\\": \\\"Azure Synapse\\\",\\\r\n", + " \\\"Username\\\": null,\\\r\n", + " \\\"Workspace\\\": 
\\\"adsdevsynwadsorgp\\\",\\\r\n", + " \\\"Database\\\": \\\"adsdevsyndpads\\\"\\\r\n", + " },\\\r\n", + " \\\"Instance\\\":{ \\\r\n", + " \\\"SourceRelativePath\\\":\\\"\\\", \\\r\n", + " \\\"TargetRelativePath\\\":\\\"\\\" \\\r\n", + " }, \\\r\n", + " \\\"DataFileName\\\":\\\"\\\", \\\r\n", + " \\\"RelativePath\\\":\\\"\\\", \\\r\n", + " \\\"SchemaFileName\\\":\\\"\\\", \\\r\n", + " \\\"Type\\\":\\\"Notebook-Optional\\\", \\\r\n", + " \\\"WriteSchemaToPurview\\\":\\\"Disabled\\\" \\\r\n", + " }, \\\r\n", + " \\\"TMOptionals\\\":{ \\\r\n", + " \\\"CustomDefinitions\\\": \\\"SparkDatabaseName=sif\\\",\\\r\n", + " \\\"ExecuteNotebook\\\":\\\"SIFLoadDimStaffPersonal\\\", \\\r\n", + " \\\"Purview\\\":\\\"Disabled\\\", \\\r\n", + " \\\"QualifiedIDAssociation\\\":\\\"TaskMasterId\\\", \\\r\n", + " \\\"UseNotebookActivity\\\":\\\"Enabled\\\" \\\r\n", + " } \\\r\n", + " }\" " + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "outputs": [], + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } + } + }, + "source": [ + "import random\r\n", + "import json\r\n", + "from pyspark.sql import Row\r\n", + "from pyspark.sql.types import *\r\n", + "from pyspark.sql.functions import *\r\n", + "\r\n", + "session_id = random.randint(0,1000000)\r\n", + "#invalid source\r\n", + "##TaskObject = \"{\\\"TaskInstanceId\\\":1,\\\"TaskMasterId\\\":2,\\\"TaskStatus\\\":\\\"InProgress\\\",\\\"TaskType\\\":\\\"TestTask Type 
Name\\\",\\\"Enabled\\\":1,\\\"ExecutionUid\\\":\\\"8448eabb-9ba4-4779-865b-29e973431273\\\",\\\"NumberOfRetries\\\":0,\\\"DegreeOfCopyParallelism\\\":1,\\\"KeyVaultBaseUrl\\\":\\\"https://ark-stg-kv-ads-irud.vault.azure.net/\\\",\\\"ScheduleMasterId\\\":\\\"-4\\\",\\\"TaskGroupConcurrency\\\":\\\"10\\\",\\\"TaskGroupPriority\\\":0,\\\"TaskExecutionType\\\":\\\"ADF\\\",\\\"ExecutionEngine\\\":{\\\"EngineId\\\":-1,\\\"EngineName\\\":\\\"ark-stg-adf-ads-irud\\\",\\\"SystemType\\\":\\\"Datafactory\\\",\\\"ResourceGroup\\\":\\\"dlzdev04\\\",\\\"SubscriptionId\\\":\\\"ed1206e0-17c7-4bc2-ad4b-f8d4dab9284f\\\",\\\"ADFPipeline\\\":\\\"GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Azure\\\",\\\"EngineJson\\\":\\\"{}\\\",\\\"TaskDatafactoryIR\\\":\\\"Azure\\\",\\\"JsonProperties\\\":{}},\\\"Source\\\":{\\\"System\\\":{\\\"SystemId\\\":-8,\\\"SystemServer\\\":\\\"https://arkstgdlsadsirudadsl.dfs.core.windows.net\\\",\\\"AuthenticationType\\\":\\\"MSI\\\",\\\"Type\\\":\\\"ADLS\\\",\\\"Username\\\":null,\\\"Container\\\":\\\"datalakelanding\\\"},\\\"Instance\\\":{\\\"TargetRelativePath\\\":\\\"\\\"},\\\"DataFileName\\\":\\\"TestFile.parquet\\\",\\\"RelativePath\\\":\\\"\\\",\\\"SchemaFileName\\\":\\\"TestFile.json\\\"},\\\"Target\\\":{\\\"System\\\":{\\\"SystemId\\\":-8,\\\"SystemServer\\\":\\\"https://arkstgdlsadsirudadsl.dfs.core.windows.net\\\",\\\"AuthenticationType\\\":\\\"MSI\\\",\\\"Type\\\":\\\"ADLS\\\",\\\"Username\\\":null,\\\"Container\\\":\\\"datalakelanding\\\"},\\\"Instance\\\":{\\\"TargetRelativePath\\\":\\\"\\\"},\\\"DataFileName\\\":\\\"TestFile.parquet\\\",\\\"RelativePath\\\":\\\"\\\",\\\"SchemaFileName\\\":\\\"TestFile.json\\\",\\\"Type\\\":\\\"Parquet\\\"}}\"\r\n", + "#valid source\r\n", + "#TaskObject = \"{\\\"TaskInstanceId\\\":1,\\\"TaskMasterId\\\":2,\\\"TaskStatus\\\":\\\"InProgress\\\",\\\"TaskType\\\":\\\"TestTask Type Name\\\", 
\\\"Enabled\\\":1,\\\"ExecutionUid\\\":\\\"8448eabb-9ba4-4779-865b-29e973431273\\\",\\\"NumberOfRetries\\\":0,\\\"DegreeOfCopyParallelism\\\":1, \\\"KeyVaultBaseUrl\\\":\\\"https://ads-dev-kv-ads-ic038069.vault.azure.net/\\\",\\\"ScheduleMasterId\\\":\\\"-4\\\",\\\"TaskGroupConcurrency\\\":\\\"10\\\", \\\"TaskGroupPriority\\\":0,\\\"TaskExecutionType\\\":\\\"ADF\\\",\\\"ExecutionEngine\\\":{\\\"EngineId\\\":-1,\\\"EngineName\\\":\\\"ads-dev-kv-ads-ic038069\\\", \\\"SystemType\\\":\\\"Microsoft.Synapse/workspaces\\\",\\\"ResourceGroup\\\":\\\"sifgofast\\\",\\\"SubscriptionId\\\":\\\"cd486ba9-eef3-466d-b16c-7f1b2941ae9d\\\", \\\"ADFPipeline\\\":\\\"GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Azure\\\",\\\"EngineJson\\\":\\\"{}\\\",\\\"TaskDatafactoryIR\\\":\\\"Azure\\\", \\\"JsonProperties\\\":{}},\\\"Source\\\":{\\\"System\\\":{\\\"SystemId\\\":-8,\\\"SystemServer\\\":\\\"https://adsdevdlsadsic03adsl.blob.core.windows.net\\\", \\\"AuthenticationType\\\":\\\"MSI\\\",\\\"Type\\\":\\\"ADLS\\\",\\\"Username\\\":null,\\\"Container\\\":\\\"adsdevdlsadsic03\\\"},\\\"Instance\\\":\\\"\\\",{\\\"TargetRelativePath\\\":\\\"synapse/sif\\\"}, \\\"DataFileName\\\":\\\"StudentPersonal.parquet\\\",\\\"SourceRelativePath\\\":\\\"synapse/sif\\\",\\\"SchemaFileName\\\":\\\"StudentPersonal.json\\\",\\\"Type\\\":\\\"Parquet\\\"}, \\\"Target\\\":{\\\"System\\\":{\\\"SystemId\\\":-8,\\\"SystemServer\\\":\\\"https://adsdevdlsadsic03adsl.blob.core.windows.net\\\", \\\"AuthenticationType\\\":\\\"MSI\\\",\\\"Type\\\":\\\"ABS\\\",\\\"Username\\\":null,\\\"Container\\\":\\\"adsdevdlsadsic03\\\"}, \\\"Instance\\\":{\\\"TargetRelativePath\\\":\\\"\\\"},\\\"DataFileName\\\":\\\"StudentPersonal.parquet\\\",\\\"SourceRelativePath\\\":\\\"synapse\\/sif\\\", \\\"SchemaFileName\\\":\\\"StudentPersonal.json\\\",\\\"Type\\\":\\\"Parquet\\\"}}\"\r\n", + "TaskDict = {}\r\n", + "OutputDict = {}\r\n", + "TaskObjectJson = json.loads(TaskObject)\r\n", + "\r\n", + "##we want to delete EngineJson as it causes 
issues when converting back to a json and it is not needed as its properties are within JsonProperties as children\r\n", + "try:\r\n", + " del TaskObjectJson['ExecutionEngine']['EngineJson']\r\n", + "except:\r\n", + " print(\"No EngineJson Found\")\r\n", + "\r\n", + "\r\n", + "SifDbName = \"sif\"\r\n", + "tmopts = TaskObjectJson['TMOptionals']['CustomDefinitions'].split(\",\")\r\n", + "for o in tmopts:\r\n", + " opt = o.split(\"=\")\r\n", + " if (opt[0] == \"SparkDatabaseName\"):\r\n", + " SifDbName = opt[1].lower()\r\n", + " break\r\n", + "\r\n", + "print(SifDbName)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "outputs": [], + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } + } + }, + "source": [ + "spark.catalog.setCurrentDatabase(SifDbName)" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "outputs": [], + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } + } + }, + "source": [ + "from pyspark.sql.functions import *\r\n", + "df_Raw = spark.sql(\"select * from raw_calendardate\")\r\n", + "df_RawWJ = df_Raw.withColumn('json',to_json (struct (col ('*')))).select(\"RefId\", \"json\")\r\n", + "\r\n", + "df_Out = df_RawWJ \\\r\n", + ".withColumn('Date',get_json_object(col('json'), '$.Date')) \\\r\n", + ".withColumn('CalendarSummaryRefId',get_json_object(col('json'), '$.CalendarSummaryRefId')) \\\r\n", + ".withColumn('SchoolInfoRefId',get_json_object(col('json'), '$.SchoolInfoRefId')) \\\r\n", + ".withColumn('SchoolYear',get_json_object(col('json'), '$.SchoolYear')) \\\r\n", + ".withColumn('CalendarDateType',get_json_object(col('json'), '$.CalendarDateType.Code')) \\\r\n", + ".withColumn('CalendarDateNumber',get_json_object(col('json'), '$.CalendarDateNumber')) \\\r\n", + ".withColumn('StudentAttendanceCountsTowardAttendance',get_json_object(col('json'), 
'$.StudentAttendance.CountsTowardAttendance')) \\\r\n", + ".withColumn('StudentAttendanceValue',get_json_object(col('json'), '$.StudentAttendance.AttendanceValue')) \\\r\n", + ".withColumn('TeacherAttendanceCountsTowardAttendance',get_json_object(col('json'), '$.TeacherAttendance.CountsTowardAttendance')) \\\r\n", + ".withColumn('TeacherAttendanceValue',get_json_object(col('json'), '$.TeacherAttendance.AttendanceValue')) \\\r\n", + ".withColumn('AdministratorAttendanceCountsTowardAttendance',get_json_object(col('json'), '$.AdministratorAttendance.CountsTowardAttendance')) \\\r\n", + ".withColumn('AdministratorAttendanceValue',get_json_object(col('json'), '$.AdministratorAttendance.AttendanceValue')) \\\r\n", + ".select('CalendarDateRefId',\t'Date',\t'CalendarSummaryRefId',\t'SchoolInfoRefId',\t'SchoolYear',\t'CalendarDateType',\t'CalendarDateNumber',\t'StudentAttendanceCountsTowardAttendance',\t'StudentAttendanceValue',\t'TeacherAttendanceCountsTowardAttendance',\t'TeacherAttendanceValue',\t'AdministratorAttendanceCountsTowardAttendance',\t'AdministratorAttendanceValue') \r\n", + "\r\n", + "\r\n", + "" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "outputs": [], + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } + } + }, + "source": [ + "df_Out.write.mode(\"overwrite\").saveAsTable(\"dm_calendardate\")\r\n", + "" + ] + } + ], + "metadata": { + "save_output": true, + "kernelspec": { + "name": "synapse_pyspark", + "display_name": "Synapse PySpark" + }, + "language_info": { + "name": "python" + } + } +} \ No newline at end of file diff --git a/solution/Synapse/Patterns/notebook/sif/SIFDimLoadTeachingGroup.ipynb b/solution/Synapse/Patterns/notebook/sif/SIFDimLoadTeachingGroup.ipynb new file mode 100644 index 00000000..c61aa217 --- /dev/null +++ b/solution/Synapse/Patterns/notebook/sif/SIFDimLoadTeachingGroup.ipynb @@ -0,0 +1,518 @@ +{ + "cells": [ + { + 
"cell_type": "code", + "source": [ + "TaskObject = \" \\\r\n", + "{ \\\"TaskInstanceId\\\":53, \\\r\n", + " \\\"TaskMasterId\\\":1, \\\r\n", + " \\\"TaskStatus\\\":\\\"InProgress\\\",\\\r\n", + " \\\"TaskType\\\":\\\"Execute Synapse Notebook\\\",\\\r\n", + " \\\"Enabled\\\":1, \\\r\n", + " \\\"ExecutionUid\\\": \\\"dc872650-b992-4cae-9ae2-c714c95563ee\\\", \\\r\n", + " \\\"NumberOfRetries\\\":2, \\\r\n", + " \\\"DegreeOfCopyParallelism\\\":1, \\\r\n", + " \\\"KeyVaultBaseUrl\\\":\\\"https://ads-dev-kv-ads-orgp.vault.azure.net\\\", \\\r\n", + " \\\"ScheduleMasterId\\\":\\\"-4\\\", \\\r\n", + " \\\"TaskGroupConcurrency\\\":\\\"10\\\", \\\r\n", + " \\\"TaskGroupPriority\\\":0, \\\r\n", + " \\\"TaskExecutionType\\\":\\\"ADF\\\", \\\r\n", + " \\\"Source\\\": {\\\r\n", + " \\\"System\\\": {\\\r\n", + " \\\"SystemId\\\": -10,\\\r\n", + " \\\"SystemServer\\\": \\\"adsdevsynwadsorgp.sql.azuresynapse.net\\\",\\\r\n", + " \\\"AuthenticationType\\\": \\\"MSI\\\",\\\r\n", + " \\\"Type\\\": \\\"Azure Synapse\\\",\\\r\n", + " \\\"Username\\\": null,\\\r\n", + " \\\"Workspace\\\": \\\"adsdevsynwadsorgp\\\",\\\r\n", + " \\\"Database\\\": \\\"adsdevsyndpads\\\"\\\r\n", + " },\\\r\n", + " \\\"Instance\\\":{ \\\r\n", + " \\\"SourceRelativePath\\\":\\\"\\\", \\\r\n", + " \\\"TargetRelativePath\\\":\\\"\\\" \\\r\n", + " }, \\\r\n", + " \\\"DataFileName\\\":\\\"\\\", \\\r\n", + " \\\"RelativePath\\\":\\\"\\\", \\\r\n", + " \\\"SchemaFileName\\\":\\\"\\\", \\\r\n", + " \\\"Type\\\":\\\"Notebook-Optional\\\", \\\r\n", + " \\\"WriteSchemaToPurview\\\":\\\"Disabled\\\" \\\r\n", + " }, \\\r\n", + " \\\"Target\\\":{ \\\r\n", + " \\\"System\\\": {\\\r\n", + " \\\"SystemId\\\": -10,\\\r\n", + " \\\"SystemServer\\\": \\\"adsdevsynwadsorgp.sql.azuresynapse.net\\\",\\\r\n", + " \\\"AuthenticationType\\\": \\\"MSI\\\",\\\r\n", + " \\\"Type\\\": \\\"Azure Synapse\\\",\\\r\n", + " \\\"Username\\\": null,\\\r\n", + " \\\"Workspace\\\": \\\"adsdevsynwadsorgp\\\",\\\r\n", + " \\\"Database\\\": 
\\\"adsdevsyndpads\\\"\\\r\n", + " },\\\r\n", + " \\\"Instance\\\":{ \\\r\n", + " \\\"SourceRelativePath\\\":\\\"\\\", \\\r\n", + " \\\"TargetRelativePath\\\":\\\"\\\" \\\r\n", + " }, \\\r\n", + " \\\"DataFileName\\\":\\\"\\\", \\\r\n", + " \\\"RelativePath\\\":\\\"\\\", \\\r\n", + " \\\"SchemaFileName\\\":\\\"\\\", \\\r\n", + " \\\"Type\\\":\\\"Notebook-Optional\\\", \\\r\n", + " \\\"WriteSchemaToPurview\\\":\\\"Disabled\\\" \\\r\n", + " }, \\\r\n", + " \\\"TMOptionals\\\":{ \\\r\n", + " \\\"CustomDefinitions\\\": \\\"SparkDatabaseName=sif\\\",\\\r\n", + " \\\"ExecuteNotebook\\\":\\\"SIFLoadDimStaffPersonal\\\", \\\r\n", + " \\\"Purview\\\":\\\"Disabled\\\", \\\r\n", + " \\\"QualifiedIDAssociation\\\":\\\"TaskMasterId\\\", \\\r\n", + " \\\"UseNotebookActivity\\\":\\\"Enabled\\\" \\\r\n", + " } \\\r\n", + " }\" " + ], + "outputs": [ + { + "output_type": "display_data", + "data": { + "application/vnd.livy.statement-meta+json": { + "spark_pool": "adsdevsynspads", + "session_id": "9", + "statement_id": 2, + "state": "finished", + "livy_statement_state": "available", + "queued_time": "2022-07-19T10:35:51.3362799Z", + "session_start_time": "2022-07-19T10:35:51.3710722Z", + "execution_start_time": "2022-07-19T10:38:38.2527842Z", + "execution_finish_time": "2022-07-19T10:38:38.4140433Z", + "spark_jobs": null + }, + "text/plain": "StatementMeta(adsdevsynspads, 9, 2, Finished, Available)" + }, + "metadata": {} + } + ], + "execution_count": 1, + "metadata": {} + }, + { + "cell_type": "code", + "source": [ + "import random\r\n", + "import json\r\n", + "from pyspark.sql import Row\r\n", + "from pyspark.sql.types import *\r\n", + "from pyspark.sql.functions import *\r\n", + "\r\n", + "session_id = random.randint(0,1000000)\r\n", + "#invalid source\r\n", + "##TaskObject = \"{\\\"TaskInstanceId\\\":1,\\\"TaskMasterId\\\":2,\\\"TaskStatus\\\":\\\"InProgress\\\",\\\"TaskType\\\":\\\"TestTask Type 
Name\\\",\\\"Enabled\\\":1,\\\"ExecutionUid\\\":\\\"8448eabb-9ba4-4779-865b-29e973431273\\\",\\\"NumberOfRetries\\\":0,\\\"DegreeOfCopyParallelism\\\":1,\\\"KeyVaultBaseUrl\\\":\\\"https://ark-stg-kv-ads-irud.vault.azure.net/\\\",\\\"ScheduleMasterId\\\":\\\"-4\\\",\\\"TaskGroupConcurrency\\\":\\\"10\\\",\\\"TaskGroupPriority\\\":0,\\\"TaskExecutionType\\\":\\\"ADF\\\",\\\"ExecutionEngine\\\":{\\\"EngineId\\\":-1,\\\"EngineName\\\":\\\"ark-stg-adf-ads-irud\\\",\\\"SystemType\\\":\\\"Datafactory\\\",\\\"ResourceGroup\\\":\\\"dlzdev04\\\",\\\"SubscriptionId\\\":\\\"ed1206e0-17c7-4bc2-ad4b-f8d4dab9284f\\\",\\\"ADFPipeline\\\":\\\"GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Azure\\\",\\\"EngineJson\\\":\\\"{}\\\",\\\"TaskDatafactoryIR\\\":\\\"Azure\\\",\\\"JsonProperties\\\":{}},\\\"Source\\\":{\\\"System\\\":{\\\"SystemId\\\":-8,\\\"SystemServer\\\":\\\"https://arkstgdlsadsirudadsl.dfs.core.windows.net\\\",\\\"AuthenticationType\\\":\\\"MSI\\\",\\\"Type\\\":\\\"ADLS\\\",\\\"Username\\\":null,\\\"Container\\\":\\\"datalakelanding\\\"},\\\"Instance\\\":{\\\"TargetRelativePath\\\":\\\"\\\"},\\\"DataFileName\\\":\\\"TestFile.parquet\\\",\\\"RelativePath\\\":\\\"\\\",\\\"SchemaFileName\\\":\\\"TestFile.json\\\"},\\\"Target\\\":{\\\"System\\\":{\\\"SystemId\\\":-8,\\\"SystemServer\\\":\\\"https://arkstgdlsadsirudadsl.dfs.core.windows.net\\\",\\\"AuthenticationType\\\":\\\"MSI\\\",\\\"Type\\\":\\\"ADLS\\\",\\\"Username\\\":null,\\\"Container\\\":\\\"datalakelanding\\\"},\\\"Instance\\\":{\\\"TargetRelativePath\\\":\\\"\\\"},\\\"DataFileName\\\":\\\"TestFile.parquet\\\",\\\"RelativePath\\\":\\\"\\\",\\\"SchemaFileName\\\":\\\"TestFile.json\\\",\\\"Type\\\":\\\"Parquet\\\"}}\"\r\n", + "#valid source\r\n", + "#TaskObject = \"{\\\"TaskInstanceId\\\":1,\\\"TaskMasterId\\\":2,\\\"TaskStatus\\\":\\\"InProgress\\\",\\\"TaskType\\\":\\\"TestTask Type Name\\\", 
\\\"Enabled\\\":1,\\\"ExecutionUid\\\":\\\"8448eabb-9ba4-4779-865b-29e973431273\\\",\\\"NumberOfRetries\\\":0,\\\"DegreeOfCopyParallelism\\\":1, \\\"KeyVaultBaseUrl\\\":\\\"https://ads-dev-kv-ads-ic038069.vault.azure.net/\\\",\\\"ScheduleMasterId\\\":\\\"-4\\\",\\\"TaskGroupConcurrency\\\":\\\"10\\\", \\\"TaskGroupPriority\\\":0,\\\"TaskExecutionType\\\":\\\"ADF\\\",\\\"ExecutionEngine\\\":{\\\"EngineId\\\":-1,\\\"EngineName\\\":\\\"ads-dev-kv-ads-ic038069\\\", \\\"SystemType\\\":\\\"Microsoft.Synapse/workspaces\\\",\\\"ResourceGroup\\\":\\\"sifgofast\\\",\\\"SubscriptionId\\\":\\\"cd486ba9-eef3-466d-b16c-7f1b2941ae9d\\\", \\\"ADFPipeline\\\":\\\"GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Azure\\\",\\\"EngineJson\\\":\\\"{}\\\",\\\"TaskDatafactoryIR\\\":\\\"Azure\\\", \\\"JsonProperties\\\":{}},\\\"Source\\\":{\\\"System\\\":{\\\"SystemId\\\":-8,\\\"SystemServer\\\":\\\"https://adsdevdlsadsic03adsl.blob.core.windows.net\\\", \\\"AuthenticationType\\\":\\\"MSI\\\",\\\"Type\\\":\\\"ADLS\\\",\\\"Username\\\":null,\\\"Container\\\":\\\"adsdevdlsadsic03\\\"},\\\"Instance\\\":\\\"\\\",{\\\"TargetRelativePath\\\":\\\"synapse/sif\\\"}, \\\"DataFileName\\\":\\\"StudentPersonal.parquet\\\",\\\"SourceRelativePath\\\":\\\"synapse/sif\\\",\\\"SchemaFileName\\\":\\\"StudentPersonal.json\\\",\\\"Type\\\":\\\"Parquet\\\"}, \\\"Target\\\":{\\\"System\\\":{\\\"SystemId\\\":-8,\\\"SystemServer\\\":\\\"https://adsdevdlsadsic03adsl.blob.core.windows.net\\\", \\\"AuthenticationType\\\":\\\"MSI\\\",\\\"Type\\\":\\\"ABS\\\",\\\"Username\\\":null,\\\"Container\\\":\\\"adsdevdlsadsic03\\\"}, \\\"Instance\\\":{\\\"TargetRelativePath\\\":\\\"\\\"},\\\"DataFileName\\\":\\\"StudentPersonal.parquet\\\",\\\"SourceRelativePath\\\":\\\"synapse\\/sif\\\", \\\"SchemaFileName\\\":\\\"StudentPersonal.json\\\",\\\"Type\\\":\\\"Parquet\\\"}}\"\r\n", + "TaskDict = {}\r\n", + "OutputDict = {}\r\n", + "TaskObjectJson = json.loads(TaskObject)\r\n", + "\r\n", + "##we want to delete EngineJson as it causes 
issues when converting back to a json and it is not needed as its properties are within JsonProperties as children\r\n", + "try:\r\n", + " del TaskObjectJson['ExecutionEngine']['EngineJson']\r\n", + "except:\r\n", + " print(\"No EngineJson Found\")\r\n", + "\r\n", + "\r\n", + "SifDbName = \"sif\"\r\n", + "tmopts = TaskObjectJson['TMOptionals']['CustomDefinitions'].split(\",\")\r\n", + "for o in tmopts:\r\n", + " opt = o.split(\"=\")\r\n", + " if (opt[0] == \"SparkDatabaseName\"):\r\n", + " SifDbName = opt[1].lower()\r\n", + " break\r\n", + "\r\n", + "print(SifDbName)" + ], + "outputs": [ + { + "output_type": "display_data", + "data": { + "application/vnd.livy.statement-meta+json": { + "spark_pool": "adsdevsynspads", + "session_id": "9", + "statement_id": 3, + "state": "finished", + "livy_statement_state": "available", + "queued_time": "2022-07-19T10:49:06.0388961Z", + "session_start_time": null, + "execution_start_time": "2022-07-19T10:49:06.3015369Z", + "execution_finish_time": "2022-07-19T10:49:06.4821123Z", + "spark_jobs": null + }, + "text/plain": "StatementMeta(adsdevsynspads, 9, 3, Finished, Available)" + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "No EngineJson Found\nsif\n" + ] + } + ], + "execution_count": 2, + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } + } + } + }, + { + "cell_type": "code", + "source": [ + "spark.catalog.setCurrentDatabase(SifDbName)" + ], + "outputs": [ + { + "output_type": "display_data", + "data": { + "application/vnd.livy.statement-meta+json": { + "spark_pool": "adsdevsynspads", + "session_id": "9", + "statement_id": 4, + "state": "finished", + "livy_statement_state": "available", + "queued_time": "2022-07-19T10:49:10.0977617Z", + "session_start_time": null, + "execution_start_time": "2022-07-19T10:49:10.2077785Z", + "execution_finish_time": "2022-07-19T10:49:22.8670019Z", + "spark_jobs": 
null + }, + "text/plain": "StatementMeta(adsdevsynspads, 9, 4, Finished, Available)" + }, + "metadata": {} + } + ], + "execution_count": 3, + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } + } + } + }, + { + "cell_type": "markdown", + "source": [ + "# Create Teaching Group " + ], + "metadata": { + "nteract": { + "transient": { + "deleting": false + } + } + } + }, + { + "cell_type": "code", + "source": [ + "from pyspark.sql.functions import *\r\n", + "df_Raw = spark.sql(\"select * from raw_teachinggroup\")\r\n", + "df_RawWJ = df_Raw.withColumn('json',to_json (struct (col ('*')))).select(\"RefId\", \"json\", \"StudentList\")\r\n", + "\r\n", + "df_Out = df_RawWJ \\\r\n", + ".withColumn('SchoolYear',get_json_object(col('json'), '$.SchoolYear')) \\\r\n", + ".withColumn('LocalId',get_json_object(col('json'), '$.LocalId')) \\\r\n", + ".withColumn('ShortName',get_json_object(col('json'), '$.ShortName')) \\\r\n", + ".withColumn('LongName',get_json_object(col('json'), '$.LongName')) \\\r\n", + ".withColumn('GroupType',get_json_object(col('json'), '$.GroupType')) \\\r\n", + ".withColumn('Set',get_json_object(col('json'), '$.Set')) \\\r\n", + ".withColumn('Block',get_json_object(col('json'), '$.Block')) \\\r\n", + ".withColumn('CurriculumLevel',get_json_object(col('json'), '$.CurriculumLevel')) \\\r\n", + ".withColumn('SchoolInfoRefId',get_json_object(col('json'), '$.SchoolInfoRefId')) \\\r\n", + ".withColumn('SchoolLocalId',get_json_object(col('json'), '$.SchoolLocalId')) \\\r\n", + ".withColumn('SchoolCourseInfoRefId',get_json_object(col('json'), '$.SchoolCourseInfoRefId')) \\\r\n", + ".withColumn('SchoolCourseLocalId',get_json_object(col('json'), '$.SchoolCourseLocalId')) \\\r\n", + ".withColumn('TimeTableSubjectRefId',get_json_object(col('json'), '$.TimeTableSubjectRefId')) \\\r\n", + ".withColumn('TimeTableSubjectLocalId',get_json_object(col('json'), 
'$.TimeTableSubjectLocalId')) \\\r\n", + ".withColumn('KeyLearningArea',get_json_object(col('json'), '$.KeyLearningArea')) \\\r\n", + ".withColumn('Semester',get_json_object(col('json'), '$.Semester')) \\\r\n", + ".withColumn('MinClassSize',get_json_object(col('json'), '$.MinClassSize')) \\\r\n", + ".withColumn('MaxClassSize',get_json_object(col('json'), '$.MaxClassSize')) \\\r\n", + ".select('RefId','SchoolYear',\t'LocalId',\t'ShortName',\t'LongName',\t'GroupType',\t'Set',\t'Block',\t'CurriculumLevel',\t'SchoolInfoRefId',\t'SchoolLocalId',\t'SchoolCourseInfoRefId',\t'SchoolCourseLocalId',\t'TimeTableSubjectRefId',\t'TimeTableSubjectLocalId',\t'KeyLearningArea',\t'Semester',\t'MinClassSize',\t'MaxClassSize'\r\n", + ") \r\n", + "\r\n", + "df_Out.write.mode(\"overwrite\").saveAsTable(\"dm_teachinggroup\")\r\n" + ], + "outputs": [ + { + "output_type": "display_data", + "data": { + "application/vnd.livy.statement-meta+json": { + "spark_pool": "adsdevsynspads", + "session_id": "9", + "statement_id": 48, + "state": "finished", + "livy_statement_state": "available", + "queued_time": "2022-07-19T11:32:17.2384269Z", + "session_start_time": null, + "execution_start_time": "2022-07-19T11:32:17.5599059Z", + "execution_finish_time": "2022-07-19T11:32:21.5777595Z", + "spark_jobs": null + }, + "text/plain": "StatementMeta(adsdevsynspads, 9, 48, Finished, Available)" + }, + "metadata": {} + } + ], + "execution_count": 47, + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } + } + } + }, + { + "cell_type": "markdown", + "source": [ + "# Create Teaching Group To Student Mapping" + ], + "metadata": { + "nteract": { + "transient": { + "deleting": false + } + } + } + }, + { + "cell_type": "code", + "source": [ + "\r\n", + "json_schema = spark.read.json(df_RawWJ.withColumn('SLS',get_json_object(col('json'), '$.StudentList')).select(\"SLS\").rdd.map(lambda x: x[0])).schema\r\n", + "\r\n", + "\r\n", 
+ "df_TeachingGroupToStudentMap = df_RawWJ \\\r\n", + ".withColumn('StudentList1',from_json(get_json_object(col('json'), '$.StudentList'),json_schema)) \\\r\n", + ".select('RefId',\\\r\n", + " explode('StudentList1.TeachingGroupStudent').alias(\"Student\"), \\\r\n", + ")\r\n", + "\r\n", + "df_TeachingGroupToStudentMap = \\\r\n", + " df_TeachingGroupToStudentMap \\\r\n", + " .withColumn('StudentGivenName',col('Student.Name.GivenName')) \\\r\n", + " .withColumn('StudentFamilyName',col('Student.Name.FamilyName')) \\\r\n", + " .withColumn('StudentNameType',col('Student.Name.Type')) \\\r\n", + " .withColumn('StudentPersonalRefId',col('Student.StudentPersonalRefId')) \\\r\n", + " .withColumn('StudentLocalId',col('Student.StudentLocalId')) \\\r\n", + " .select('RefId','StudentGivenName','StudentFamilyName','StudentNameType','StudentLocalId','StudentPersonalRefId')\r\n", + "\r\n", + "df_TeachingGroupToStudentMap.write.mode(\"overwrite\").saveAsTable(\"dm_TeachingGroupToStudentMap\")" + ], + "outputs": [ + { + "output_type": "display_data", + "data": { + "application/vnd.livy.statement-meta+json": { + "spark_pool": "adsdevsynspads", + "session_id": "9", + "statement_id": 55, + "state": "finished", + "livy_statement_state": "available", + "queued_time": "2022-07-19T11:42:32.5566001Z", + "session_start_time": null, + "execution_start_time": "2022-07-19T11:42:32.6562777Z", + "execution_finish_time": "2022-07-19T11:42:36.6065423Z", + "spark_jobs": null + }, + "text/plain": "StatementMeta(adsdevsynspads, 9, 55, Finished, Available)" + }, + "metadata": {} + } + ], + "execution_count": 54, + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } + } + } + }, + { + "cell_type": "markdown", + "source": [ + "# Create Teaching Group To Teacher" + ], + "metadata": { + "nteract": { + "transient": { + "deleting": false + } + } + } + }, + { + "cell_type": "code", + "source": [ + "\r\n", + "json_schema = 
spark.read.json(df_RawWJ.withColumn('SLS',get_json_object(col('json'), '$.TeacherList')).select(\"SLS\").rdd.map(lambda x: x[0])).schema\r\n", + "\r\n", + "\r\n", + "df_TeachingGroupToStudentMap = df_RawWJ \\\r\n", + ".withColumn('TeacherList1',from_json(get_json_object(col('json'), '$.TeacherList'),json_schema)) \\\r\n", + ".select('RefId',\\\r\n", + " explode('TeacherList1.TeachingGroupTeacher').alias(\"Teacher\"), \\\r\n", + ")\r\n", + "\r\n", + "df_TeachingGroupToStudentMap = \\\r\n", + " df_TeachingGroupToStudentMap \\\r\n", + " .withColumn('TeacherGivenName',col('Teacher.Name.GivenName')) \\\r\n", + " .withColumn('TeacherFamilyName',col('Teacher.Name.FamilyName')) \\\r\n", + " .withColumn('TeacherNameType',col('Teacher.Name.Type')) \\\r\n", + " .withColumn('TeacherPersonalRefId',col('Teacher.StaffPersonalRefId')) \\\r\n", + " .withColumn('TeacherLocalId',col('Teacher.StaffLocalId')) \\\r\n", + " .withColumn('TeacherAssociation',col('Teacher.Association')) \\\r\n", + " .select('RefId','TeacherGivenName','TeacherFamilyName','TeacherNameType','TeacherLocalId','TeacherPersonalRefId', 'TeacherAssociation')\r\n", + "\r\n", + "df_TeachingGroupToStudentMap.write.mode(\"overwrite\").saveAsTable(\"dm_TeachingGroupToTeacherMap\")" + ], + "outputs": [ + { + "output_type": "display_data", + "data": { + "application/vnd.livy.statement-meta+json": { + "spark_pool": "adsdevsynspads", + "session_id": "9", + "statement_id": 54, + "state": "finished", + "livy_statement_state": "available", + "queued_time": "2022-07-19T11:42:17.4196407Z", + "session_start_time": null, + "execution_start_time": "2022-07-19T11:42:17.7235441Z", + "execution_finish_time": "2022-07-19T11:42:21.6646523Z", + "spark_jobs": null + }, + "text/plain": "StatementMeta(adsdevsynspads, 9, 54, Finished, Available)" + }, + "metadata": {} + } + ], + "execution_count": 53, + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } 
+ } + } + }, + { + "cell_type": "markdown", + "source": [ + "# Teaching Group to Period Mapping" + ], + "metadata": { + "nteract": { + "transient": { + "deleting": false + } + } + } + }, + { + "cell_type": "code", + "source": [ + "\r\n", + "json_schema = spark.read.json(df_RawWJ.withColumn('SLS',get_json_object(col('json'), '$.TeachingGroupPeriodList')).select(\"SLS\").rdd.map(lambda x: x[0])).schema\r\n", + "\r\n", + "\r\n", + "df_TeachingGroupPeriodMap = df_RawWJ \\\r\n", + ".withColumn('TeachingGroupPeriod1',from_json(get_json_object(col('json'), '$.TeachingGroupPeriodList'),json_schema)) \\\r\n", + ".select('RefId',\\\r\n", + " explode('TeachingGroupPeriod1.TeachingGroupPeriod').alias(\"TeachingGroupPeriod\"), \\\r\n", + ")\r\n", + "\r\n", + "df_TeachingGroupPeriodMap = \\\r\n", + " df_TeachingGroupPeriodMap \\\r\n", + " .withColumn('TeachingGroupPeriodDayId',col('TeachingGroupPeriod.DayId')) \\\r\n", + " .withColumn('TeachingGroupPeriodPeriodId',col('TeachingGroupPeriod.PeriodId')) \\\r\n", + " .select('RefId','TeachingGroupPeriodDayId','TeachingGroupPeriodPeriodId')\r\n", + "\r\n", + "df_TeachingGroupPeriodMap.write.mode(\"overwrite\").saveAsTable(\"dm_TeachingGroupPeriodMap\")" + ], + "outputs": [ + { + "output_type": "display_data", + "data": { + "application/vnd.livy.statement-meta+json": { + "spark_pool": "adsdevsynspads", + "session_id": "9", + "statement_id": 56, + "state": "finished", + "livy_statement_state": "available", + "queued_time": "2022-07-19T11:47:47.6790432Z", + "session_start_time": null, + "execution_start_time": "2022-07-19T11:47:48.0874056Z", + "execution_finish_time": "2022-07-19T11:47:50.8601268Z", + "spark_jobs": null + }, + "text/plain": "StatementMeta(adsdevsynspads, 9, 56, Finished, Available)" + }, + "metadata": {} + } + ], + "execution_count": 55, + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } + } + } + } + ], + "metadata": { + 
"language_info": { + "name": "python" + }, + "kernelspec": { + "name": "synapse_pyspark", + "language": "Python", + "display_name": "Synapse PySpark" + }, + "kernel_info": { + "name": "synapse_pyspark" + }, + "save_output": true, + "synapse_widget": { + "version": "0.1", + "state": {} + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/solution/Synapse/Patterns/notebook/sif/SIFLoadDimStaffPersonal.ipynb b/solution/Synapse/Patterns/notebook/sif/SIFLoadDimStaffPersonal.ipynb index 374d0331..75032bc8 100644 --- a/solution/Synapse/Patterns/notebook/sif/SIFLoadDimStaffPersonal.ipynb +++ b/solution/Synapse/Patterns/notebook/sif/SIFLoadDimStaffPersonal.ipynb @@ -64,26 +64,7 @@ " } \\\r\n", " }\" \r\n" ], - "outputs": [ - { - "output_type": "display_data", - "data": { - "application/vnd.livy.statement-meta+json": { - "execution_finish_time": "2022-07-03T12:04:55.8431069Z", - "execution_start_time": "2022-07-03T12:04:55.6916735Z", - "livy_statement_state": "available", - "queued_time": "2022-07-03T12:04:55.583777Z", - "session_id": 7, - "session_start_time": null, - "spark_pool": "adsdevsynspads", - "state": "finished", - "statement_id": 2 - }, - "text/plain": "StatementMeta(adsdevsynspads, 7, 2, Finished, Available)" - }, - "metadata": {} - } - ], + "outputs": [], "execution_count": null, "metadata": { "jupyter": { @@ -178,62 +159,43 @@ "df_Out = df_RawWJ \\\n", ".withColumn('LocalId',get_json_object(col('json'), '$.LocalId')) \\\n", ".withColumn('StateProvinceId',get_json_object(col('json'), '$.StateProvinceId')) \\\n", - ".withColumn('FirstName',get_json_object(col('json'), '$.FirstName')) \\\n", - ".withColumn('LastName',get_json_object(col('json'), '$.LastName')) \\\n", + ".withColumn('FirstName',get_json_object(col('json'), '$.PersonInfo.Name.GivenName   ')) \\\n", + ".withColumn('LastName',get_json_object(col('json'), '$.PersonInfo.Name.FamilyName   ')) \\\n", ".withColumn('MiddleName',get_json_object(col('json'), 
'$.MiddleName')) \\\n", - ".withColumn('OtherNames',get_json_object(col('json'), '$.OtherNames')) \\\n", + ".withColumn('OtherNames',get_json_object(col('json'), '$.PersonInfo.OtherNames')) \\\n", ".withColumn('EmploymentStatus',get_json_object(col('json'), '$.EmploymentStatus')) \\\n", ".withColumn('Title',get_json_object(col('json'), '$.Title')) \\\n", - ".withColumn('IndigenousStatus',get_json_object(col('json'), '$.IndigenousStatus')) \\\n", - ".withColumn('Sex',get_json_object(col('json'), '$.Sex')) \\\n", - ".withColumn('BirthDate',get_json_object(col('json'), '$.BirthDate')) \\\n", - ".withColumn('DateOfDeath',get_json_object(col('json'), '$.DateOfDeath')) \\\n", - ".withColumn('Deceased',get_json_object(col('json'), '$.Deceased')) \\\n", - ".withColumn('BirthDateVerification',get_json_object(col('json'), '$.BirthDateVerification')) \\\n", - ".withColumn('PlaceOfBirth',get_json_object(col('json'), '$.PlaceOfBirth')) \\\n", - ".withColumn('StateOfBirth',get_json_object(col('json'), '$.StateOfBirth')) \\\n", - ".withColumn('CountryOfBirth',get_json_object(col('json'), '$.CountryOfBirth')) \\\n", - ".withColumn('CountryOfCitizenship',get_json_object(col('json'), '$.CountryOfCitizenship')) \\\n", - ".withColumn('CountryOfResidency',get_json_object(col('json'), '$.CountryOfResidency')) \\\n", - ".withColumn('CountryArrivalDate',get_json_object(col('json'), '$.CountryArrivalDate')) \\\n", - ".withColumn('AustralianCitizenshipStatus',get_json_object(col('json'), '$.AustralianCitizenshipStatus')) \\\n", - ".withColumn('EnglishProficiency',get_json_object(col('json'), '$.EnglishProficiency')) \\\n", - ".withColumn('LanguageList',get_json_object(col('json'), '$.LanguageList')) \\\n", - ".withColumn('DwellingArrangement',get_json_object(col('json'), '$.DwellingArrangement')) \\\n", - ".withColumn('Religion',get_json_object(col('json'), '$.Religion')) \\\n", - ".withColumn('ReligiousEventList',get_json_object(col('json'), '$.ReligiousEventList')) \\\n", - 
".withColumn('ReligiousRegion',get_json_object(col('json'), '$.ReligiousRegion')) \\\n", - ".withColumn('PermanentResident',get_json_object(col('json'), '$.PermanentResident')) \\\n", - ".withColumn('VisaSubClass',get_json_object(col('json'), '$.VisaSubClass')) \\\n", - ".withColumn('VisaStatisticalCode',get_json_object(col('json'), '$.VisaStatisticalCode')) \\\n", - ".withColumn('EmailList',get_json_object(col('json'), '$.EmailList')) \\\n", - ".withColumn('PhoneNumberList',get_json_object(col('json'), '$.PhoneNumberList')) \\\n", - ".withColumn('AddressList',get_json_object(col('json'), '$.AddressList')) \\\n", + ".withColumn('IndigenousStatus',get_json_object(col('json'), '$.PersonInfo.Demographics.IndigenousStatus')) \\\n", + ".withColumn('Sex',get_json_object(col('json'), '$.PersonInfo.Demographics.Sex')) \\\n", + ".withColumn('BirthDate',get_json_object(col('json'), '$.PersonInfo.Demographics.BirthDate')) \\\n", + ".withColumn('DateOfDeath',get_json_object(col('json'), '$.PersonInfo.Demographics.DateOfDeath')) \\\n", + ".withColumn('Deceased',get_json_object(col('json'), '$.PersonInfo.Demographics.Deceased')) \\\n", + ".withColumn('BirthDateVerification',get_json_object(col('json'), '$.PersonInfo.Demographics.BirthDateVerification')) \\\n", + ".withColumn('PlaceOfBirth',get_json_object(col('json'), '$.PersonInfo.Demographics.PlaceOfBirth')) \\\n", + ".withColumn('StateOfBirth',get_json_object(col('json'), '$.PersonInfo.Demographics.StateOfBirth')) \\\n", + ".withColumn('CountryOfBirth',get_json_object(col('json'), '$.PersonInfo.Demographics.CountryOfBirth')) \\\n", + ".withColumn('CountryOfCitizenship',get_json_object(col('json'), '$.PersonInfo.Demographics.CountriesOfCitizenship')) \\\n", + ".withColumn('CountryOfResidency',get_json_object(col('json'), '$.PersonInfo.Demographics.CountriesOfResidency')) \\\n", + ".withColumn('CountryArrivalDate',get_json_object(col('json'), '$.PersonInfo.Demographics.CountryArrivalDate')) \\\n", + 
".withColumn('AustralianCitizenshipStatus',get_json_object(col('json'), '$.PersonInfo.Demographics.AustralianCitizenshipStatus')) \\\n", + ".withColumn('EnglishProficiency',get_json_object(col('json'), '$.PersonInfo.Demographics.EnglishProficiency.Code')) \\\n", + ".withColumn('LanguageList',get_json_object(col('json'), '$.PersonInfo.Demographics.LanguageList')) \\\n", + ".withColumn('DwellingArrangement',get_json_object(col('json'), '$.PersonInfo.Demographics.DwellingArrangement.Code')) \\\n", + ".withColumn('Religion',get_json_object(col('json'), '$.PersonInfo.Demographics.Religion.Code')) \\\n", + ".withColumn('ReligiousEventList',get_json_object(col('json'), '$.PersonInfo.Demographics.ReligiousEventList')) \\\n", + ".withColumn('ReligiousRegion',get_json_object(col('json'), '$.PersonInfo.Demographics.ReligiousRegion')) \\\n", + ".withColumn('PermanentResident',get_json_object(col('json'), '$.PersonInfo.Demographics.PermanentResident')) \\\n", + ".withColumn('VisaSubClass',get_json_object(col('json'), '$.PersonInfo.Demographics.VisaSubClass')) \\\n", + ".withColumn('VisaStatisticalCode',get_json_object(col('json'), '$.PersonInfo.Demographics.VisaStatisticalCode')) \\\n", + ".withColumn('EmailList',get_json_object(col('json'), '$.PersonInfo.EmailList')) \\\n", + ".withColumn('PhoneNumberList',get_json_object(col('json'), '$.PersonInfo.PhoneNumberList')) \\\n", + ".withColumn('AddressList',get_json_object(col('json'), '$.PersonInfo.AddressList')) \\\n", 
".select('RefId',\t'LocalId',\t'StateProvinceId',\t'FirstName',\t'LastName',\t'MiddleName',\t'OtherNames',\t'EmploymentStatus',\t'Title',\t'IndigenousStatus',\t'Sex',\t'BirthDate',\t'DateOfDeath',\t'Deceased',\t'BirthDateVerification',\t'PlaceOfBirth',\t'StateOfBirth',\t'CountryOfBirth',\t'CountryOfCitizenship',\t'CountryOfResidency',\t'CountryArrivalDate',\t'AustralianCitizenshipStatus',\t'EnglishProficiency',\t'LanguageList',\t'DwellingArrangement',\t'Religion',\t'ReligiousEventList',\t'ReligiousRegion',\t'PermanentResident',\t'VisaSubClass',\t'VisaStatisticalCode',\t'EmailList',\t'PhoneNumberList',\t'AddressList',\n", ") \n", "\n", "\n" ], - "outputs": [ - { - "output_type": "display_data", - "data": { - "application/vnd.livy.statement-meta+json": { - "execution_finish_time": "2022-07-03T12:23:33.5774397Z", - "execution_start_time": "2022-07-03T12:23:31.7503941Z", - "livy_statement_state": "available", - "queued_time": "2022-07-03T12:23:31.6527541Z", - "session_id": 7, - "session_start_time": null, - "spark_pool": "adsdevsynspads", - "state": "finished", - "statement_id": 29 - }, - "text/plain": "StatementMeta(adsdevsynspads, 7, 29, Finished, Available)" - }, - "metadata": {} - } - ], + "outputs": [], "execution_count": null, "metadata": { "jupyter": { @@ -252,26 +214,7 @@ "source": [ "df_Out.write.mode(\"overwrite\").saveAsTable(\"dm_staffpersonal\")" ], - "outputs": [ - { - "output_type": "display_data", - "data": { - "application/vnd.livy.statement-meta+json": { - "execution_finish_time": "2022-07-03T12:25:32.2411604Z", - "execution_start_time": "2022-07-03T12:25:19.5170497Z", - "livy_statement_state": "available", - "queued_time": "2022-07-03T12:25:19.4260717Z", - "session_id": 7, - "session_start_time": null, - "spark_pool": "adsdevsynspads", - "state": "finished", - "statement_id": 32 - }, - "text/plain": "StatementMeta(adsdevsynspads, 7, 32, Finished, Available)" - }, - "metadata": {} - } - ], + "outputs": [], "execution_count": null, "metadata": { 
"jupyter": { @@ -294,7 +237,6 @@ "language_info": { "name": "python" }, - "description": null, "save_output": true, "synapse_widget": { "version": "0.1", diff --git a/solution/Synapse/Patterns/notebook/sif/SIFLoadDimStudentPersonal.ipynb b/solution/Synapse/Patterns/notebook/sif/SIFLoadDimStudentPersonal.ipynb index 78718b57..a34fa051 100644 --- a/solution/Synapse/Patterns/notebook/sif/SIFLoadDimStudentPersonal.ipynb +++ b/solution/Synapse/Patterns/notebook/sif/SIFLoadDimStudentPersonal.ipynb @@ -1,7 +1,25 @@ { + "nbformat": 4, + "nbformat_minor": 2, "cells": [ { "cell_type": "code", + "execution_count": null, + "outputs": [], + "metadata": { + "jupyter": { + "source_hidden": false, + "outputs_hidden": false + }, + "nteract": { + "transient": { + "deleting": false + } + }, + "tags": [ + "parameters" + ] + }, "source": [ "TaskObject = \" \\\r\n", "{ \\\"TaskInstanceId\\\":53, \\\r\n", @@ -63,28 +81,12 @@ " \\\"UseNotebookActivity\\\":\\\"Enabled\\\" \\\r\n", " } \\\r\n", " }\" " - ], - "outputs": [ - { - "output_type": "display_data", - "data": { - "application/vnd.livy.statement-meta+json": { - "spark_pool": "adsdevsynspads", - "session_id": 6, - "statement_id": 2, - "state": "finished", - "livy_statement_state": "available", - "queued_time": "2022-07-04T06:27:30.9804507Z", - "session_start_time": null, - "execution_start_time": "2022-07-04T06:27:31.3209392Z", - "execution_finish_time": "2022-07-04T06:27:31.4771992Z" - }, - "text/plain": "StatementMeta(adsdevsynspads, 6, 2, Finished, Available)" - }, - "metadata": {} - } - ], + ] + }, + { + "cell_type": "code", "execution_count": null, + "outputs": [], "metadata": { "jupyter": { "source_hidden": false, @@ -94,14 +96,8 @@ "transient": { "deleting": false } - }, - "tags": [ - "parameters" - ] - } - }, - { - "cell_type": "code", + } + }, "source": [ "import random\r\n", "import json\r\n", @@ -134,9 +130,12 @@ " break\r\n", "\r\n", "print(SifDbName)" - ], - "outputs": [], + ] + }, + { + "cell_type": "code", 
"execution_count": null, + "outputs": [], "metadata": { "jupyter": { "source_hidden": false, @@ -147,34 +146,15 @@ "deleting": false } } - } - }, - { - "cell_type": "code", + }, "source": [ "spark.catalog.setCurrentDatabase(SifDbName)" - ], - "outputs": [ - { - "output_type": "display_data", - "data": { - "application/vnd.livy.statement-meta+json": { - "spark_pool": "adsdevsynspads", - "session_id": 6, - "statement_id": 3, - "state": "finished", - "livy_statement_state": "available", - "queued_time": "2022-07-04T06:27:33.1984447Z", - "session_start_time": null, - "execution_start_time": "2022-07-04T06:27:33.3067409Z", - "execution_finish_time": "2022-07-04T06:27:50.1894041Z" - }, - "text/plain": "StatementMeta(adsdevsynspads, 6, 3, Finished, Available)" - }, - "metadata": {} - } - ], + ] + }, + { + "cell_type": "code", "execution_count": null, + "outputs": [], "metadata": { "jupyter": { "source_hidden": false, @@ -185,10 +165,7 @@ "deleting": false } } - } - }, - { - "cell_type": "code", + }, "source": [ "from pyspark.sql.functions import *\r\n", "df_Raw = spark.sql(\"select * from raw_studentpersonal\")\r\n", @@ -266,67 +243,14 @@ ".withColumn('PrivateHealthInsurance',get_json_object(col('json'), '$.PrivateHealthInsurance')) \\\r\n", 
".select('RefId',\t'StateProvinceId',\t'NationalUniqueStudentIdentifier',\t'AlertMessages',\t'MedicalAlertMessages',\t'FirstName',\t'LastName',\t'MiddleName',\t'OtherNames',\t'ProjectedGraduationYear',\t'OnTimeGraduationYear',\t'GraduationDate',\t'MostRecent',\t'AcceptableUsePolicy',\t'GiftedTalented',\t'EconomicDisadvantage',\t'ESL',\t'ESLDateAssessed',\t'YoungCarersRole',\t'Disability',\t'IntegrationAide',\t'EducationSupport',\t'HomeSchooledStudent',\t'IndependentStudent',\t'Sensitive',\t'OfflineDelivery',\t'ESLSupport',\t'PrePrimaryEducation',\t'PrePrimaryEducationHours',\t'FirstAUSchoolEnrollment',\t'EmailList',\t'PhoneNumberList',\t'AddressList',\t'IndigenousStatus',\t'Sex',\t'BirthDate',\t'DateOfDeath',\t'Deceased',\t'BirthDateVerification',\t'PlaceOfBirth',\t'StateOfBirth',\t'CountryOfBirth',\t'CountryOfCitizenship',\t'CountryOfResidency',\t'CountryArrivalDate',\t'AustralianCitizenshipStatus',\t'EnglishProficiency',\t'MainLanguageSpokenAtHome', 'SecondLanguage', 'OtherLanguage',\t'DwellingArrangement',\t'Religion',\t'ReligiousEventList',\t'ReligiousRegion',\t'PermanentResident',\t'VisaSubClass',\t'VisaStatisticalCode',\t'VisaSubClassList',\t'PassportNumber', 'PassportExpiryDate', 'PassportCountry',\t'LBOTE',\t'InterpreterRequired',\t'ImmunisationCertificateStatus',\t'CulturalBackground',\t'MaritalStatus',\t'MedicareNumber',\t'MedicarePositionNumber',\t'MedicareCardHolderName',\t'PrivateHealthInsurance') \r\n", "\r\n", - "\r\n" - ], - "outputs": [ - { - "output_type": "display_data", - "data": { - "application/vnd.livy.statement-meta+json": { - "spark_pool": "adsdevsynspads", - "session_id": 6, - "statement_id": 12, - "state": "finished", - "livy_statement_state": "available", - "queued_time": "2022-07-04T06:48:43.7745108Z", - "session_start_time": null, - "execution_start_time": "2022-07-04T06:48:43.9333205Z", - "execution_finish_time": "2022-07-04T06:48:45.775142Z" - }, - "text/plain": "StatementMeta(adsdevsynspads, 6, 12, Finished, Available)" - }, - 
"metadata": {} - } - ], - "execution_count": null, - "metadata": { - "jupyter": { - "source_hidden": false, - "outputs_hidden": false - }, - "nteract": { - "transient": { - "deleting": false - } - } - } + "\r\n", + "" + ] }, { "cell_type": "code", - "source": [ - "df_Out.write.mode(\"overwrite\").saveAsTable(\"dm_studentpersonal\")" - ], - "outputs": [ - { - "output_type": "display_data", - "data": { - "application/vnd.livy.statement-meta+json": { - "spark_pool": "adsdevsynspads", - "session_id": 6, - "statement_id": 13, - "state": "finished", - "livy_statement_state": "available", - "queued_time": "2022-07-04T06:48:47.3502998Z", - "session_start_time": null, - "execution_start_time": "2022-07-04T06:48:47.4784394Z", - "execution_finish_time": "2022-07-04T06:48:51.4719122Z" - }, - "text/plain": "StatementMeta(adsdevsynspads, 6, 13, Finished, Available)" - }, - "metadata": {} - } - ], "execution_count": null, + "outputs": [], "metadata": { "jupyter": { "source_hidden": false, @@ -337,7 +261,10 @@ "deleting": false } } - } + }, + "source": [ + "df_Out.write.mode(\"overwrite\").saveAsTable(\"dm_studentpersonal\")" + ] } ], "metadata": { @@ -347,14 +274,6 @@ }, "language_info": { "name": "python" - }, - "description": null, - "save_output": true, - "synapse_widget": { - "version": "0.1", - "state": {} } - }, - "nbformat": 4, - "nbformat_minor": 2 + } } \ No newline at end of file From e05a928de2ba9b188754c39f40e39ed06e13da32 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Wed, 20 Jul 2022 05:18:43 +0800 Subject: [PATCH 003/151] Updated SIF Template Excel --- utilities/SIfTemplate.xlsx | Bin 29836 -> 44170 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/utilities/SIfTemplate.xlsx b/utilities/SIfTemplate.xlsx index 7292b8d1935d58038b6dda5d43e51db8e5fa2241..37bc98c30b4779f33a0d8b85d73bfa9d06f6e0bf 100644 GIT binary patch delta 29044 
zcmZ^~18`+e^e!4Znb@|SOl;e>ZR^CgZQGgHww+9DO{~ew{ORu^zA+bCkyNmRtHfS{8uDSrV#(+Z4L#a5bWch&EE7%{Wg6u0#!k{9uN#3xn*og-5I^!%&l4yC8GJ<(n`Zv z46P)~dT?rC5e61l3A%bfS7}JBL2A8#FCRW&j@(0RmgJgo$>tBV^)F`~AHDGtNHXUO zRmaEr+M-PA@Oe#8TA|18y1ztNAswv{RR-5u0rXVl!I9V^cDh(eXd^B`Q;C>V-AOD3 zMSjIu)lv~t1LX|~I}+#ej4mwl<}DHLRHF$tA;WB>e}QWbYt`idUkH|*KMXamA$ggQQ$9pv#&C<+g*B2GQriMe*ZDDY@`p5F@l_9SB`CY`% zC;43{{uPj1p&GmXEb1{^Mxd*|Nv?bDF;*Mslt1B*PkY|DaEXdbVr;2mK#6G($Ty)@ z4SC+eb)Op;^~w+>t5pX6QrY`TWAH|T1V_-}k5ckou6sx4%vaTK^w%^sWPc{bi=#Cm zf+>SSgI^i>^@9Qdc_rCEV*b0|9EiRQ^G7$*Tn@j(g0xh?Y|6oX?JMd5p+O$5jv_=5%eUQLHc)O$6;OQ>UWwZ24c|yz` zhdv~HjWHv6p#(lU0^sC?on+;5D<<}G(VqFoCU%Wq>kP);j9I}Ju$z&EZu!EGuiDq& zhCL;%oq7{$Lp!f&>{LpvbgLTTH3iA2$lst!ZA5oyZ+oJ=wFYaiIAcq)FuXZ;9cl>4 zuB|yiiEe%uF#GV=^SUAjBP}Q1qDv>%s|6yo`F&O#LtGXc0rDNLQa?78*x?BvKq8zP%ptcYUH$#Z+#T4pwP!wtY*Da=en(ae%o%4pBmD5f+gOGW zX*)UA)yKZY3XqGYgrXgU2KgzqP3gFB3du3}$!Pv7ZCMQn#;dS1{;H_Px@+f(kml0z zPU@hu<&$aYe!G72y#QCO<~lbq#Umv$)9M0p1Koyg9^OQ-%Eyyi7e4Dpb3B^D?G=!e zRnt?*oGEy;?X0s(j-xhH)@dJQ3^M~3sNN+`End4AE+B?>FplR1JQywpLhKm0q@V(% zWv`+|n27Wj`CO|f3|9z#?M5vOo6d-^D@reI{GZFYW4ZmtE@0UPbYZ-!m@ZI~_wM=V z-(7(>!@Jp!+W|-}W1q4!ce1{~DfdyGa{(FNCZ$^r2=oEZpJc+GnVlMkzh(+DmyP2! 
z{AsxrJOCzib-}o~3KFPBu{y9qxW7VEICJ-61wD;d8)AlJucmNJEz!NDT+XF2{t|rp zm7h2~L#xu#OdUlZd_aSEN>5mdIjK0x%r7}0p^D?a*Y1@lkKz=t zj(45WXdd~!_SYJFD=@^bbnMXGX5dfF3aNS7VNJeeS9RRg`nvv&=b@j3 z@J6AW)tc;0c+j#X_xgIrKT!IP83&+SfEBm?}8ca=KUm#8;JuXN7$IPv{0O)b^USq z7!8N8=diNF8Mw+KEi$s)JAR=8M%$s_Ou@N?u@!uO4v^Xi>gtbdiY#hv`DGn`DZ;XL zR8ilV{eJTv%dNk{E~-f5!+c&ODr>GKp&l^&5kt#A+rYSnOoc{q79SD9^P?#p`FL_I zs0z^%jI$KC$jNjss6A&1>l_FF%3>VxeS?C-8X>f}8vO z--8jx_MIl{3IPbn!YHW^o(K?|#fTVsL-UA7y9>#9k{_?ijG+PV3(!9?B==xD_q zrn>BYgXyF}rIexzjTv-xlefzvgCE-{>RlV7pc@f^VKBs9YBgUY^>NUGeT0c6-I>{1 zc|hiO0KqTQ>z9F>ypzs$f(~53LZ+wPN$0a$&z832VKvE}Zke3AGytg5do`=#ta+Dh zd>kZ{@Rbw|gA^*bF)S}die^CFl-yYdF7%Y9JG5UhvYufbzwVc;{MmUGPSp-Rq|`jn z^QaI_N|URWv*5BGcta9FB`glF4(0BWde*y3DcRr5T-ksh|4DSB{ca4nRCXiUe(Qx5 zHqyWEb^z}x9GEQ;Nd&;sbPW{B4=lh=;ib{L71H13MGgs~PAASAV@8WRwqnMac`C$= zobikmda8wZ{=tx^Xz_>PR+qRh9b!@9=`mw^Dog4K@`LeXycro=ID)n3Y{FH65~~$V zld|0!pm;$AT6}=TO=6~b@|oFe9YXV9>-A2drp*;D+tU}>i37|As_riDvQ>HtyVv-t zh1u&=ub?LzGeES}>PEiV_QMD9fx9v|X_RUi^r1-Z{rzdd;)T0_)}oun5!b~d&yr{k zEhcSeLxzh=(#oPXu0KfaWCV{Y(OV|K19$IuEUJhpLfVxF!gfxDD36Lvs`pYLBT!>5 zz{4c8Ut)<{ObantDDXXoU(lro#%2Y#E%)V6L<&?1K2t;LHyt@3wm zfICsEM!)TiY&yGVt4?k6e%Q^?twR&Az1}6eU~&RDn%sX4`Q+l9NX+>v9X9fGdcX2{ zY}Dbk-z5)w_jGdW2<|~S{b1*mb6dN|czS4SqV;w5y4_qq>rebNCbjN}P8NL5g_jh! 
zIs6>_!+(VRH%y+r(%hKz+;iceIydZIDYK@1or_Hyw#gM@iV0#$j-+Xkdp@E8TF2(+ z_{<*w^e;Z{?jVY&X|p@J_4VrO9rByTq(9`+Gj_G)X{-D`UAb2?f0EW;JZ>I-BPd|Y zV8m6OYz~JC_I7A(>uw>09@_xi%nlCnd$%7MJk&XdA1@Xcjqc%djz9>!y1Vr22pDp2 zR!+-rDirx^&~dT`Ij4ze&yP1T%nfg{!gPB8e4mX@$M3%l9p1erRXiHEL7rwOmt8*J zx_sL;0lGGJ%gR~rlLA(G7;$wo*nImz_G(c_pH_>hDFh%D1=SznKXSMDczO64ef4Zs zYp^=3L{UsHHV7N`)ebvnlb-X;dRf}QHWhr*)NtZYU#OZ7PUd0(@K-NwbFus(ZXezN zi_vk?UleOn_3kkea(#B8k32uFscLNRFH6VCNcoti$(9$JQ&r>_Ih1&1gCrhV>4=sW zS^d6=4MpA*GEj3T3%j}qW>t+UW?qe|M!Ihxe_sP>RBb$doV1EPS8>rtv`Ub)`V7~$N4e%AI>yX4PSh98uDJO>HLp{pz zZXlwP`jO+vXaR0<(5ue|s22MbeUI{O;%hTh(IFb4?X>q{Gglg;ZAm}oajeHQ$W$DG zG4DIW=Qm|fUuVbrGqch!z=wll4>#31afJ<;||6L!X z-rYJirtHJPuT`CZ_-O&o^p_v55xHbRhd-jq7yy}vAM(tY>8D;ko13zW(bcjO-3Ub@rnhIS_xn1Ai<7+7 z-j5C~JNLAU(QRNJ%u9H82pMRPQKhHd&CS=c;N^Vn5*OJe4~Be0)7n7$I00)j>i{k2 zVj|(x7!oQkj7i{EfNu=N7Zq(7xiGDKi-}Ys60AvvC%p{S_Q_zrKkK}p8{6>yH`tj<++O@)~&KEwed0qtt}H} z@W$sr{poOS{d9Y+ySk^VfE#M0n*WTVj&bb zk6^b$By26{YKitgUW=Vwn8OCUBsB4 zTX2XOFsVGu0AFFTK#c}BJtP;hNjXR_H>CZp0M*|$K41~lK;e}VHsh(Jfn(?-%ponV z$yL<`Z3)$wK@zHrhY9CQRH|L6e035|?SFz&2_6q}4~nqW20w&C=bM(Mi-%EU^ukhJ z=ydZXrSEN$%((G*`Rbqe!iYzF!gAGEH=Yzmm{^F~0nZjHUZ>b!zm!wk(rLbMWedw}4*f&&LePng;+iaURB*iHT?;GUN{wc8_^b>cXKulZ(hU%IL z4Vo{_2C$G}Frv7OwDqn+;jBXOhsSkB6=J#M?__yJn_k5*-HebzlaSJ%O1UIWq~ZP< zE))wj9W-cW8W(65SL<|+;u$<}#2Q&{RQQHOt!jC4S%~rifr~0- z#mgU|4KS!C(We_$=jiH|degc*kb8TP3xI95nu$^eUw?h`By*okdm;OyZ!-yK_<+SLz7Lgm%Kfp+qlWB;pzTg~&=M%m7BFcrlGSTronM zCBUama86?$q@n&W!?Ls=FOd$~B{3p7!~)bsN;gz+40U!)y>jq9;bm1TMR3)= zJlMdH0JdTG2zo%hB2p8Q8;lWta0_<2XN4w%^J9=h={k#i0&Lshk^r(H=2}0(U8afG za`Csi1u<_L;I1K}>Q!!HM0OYi_5r!}{vZcYuzADo)H##^Rd7NOLnFePK`+_l)>>trRanYHsRJrZ{+Btt z{m}wzV^80!p)(s)&rmk`kX=g=Z0yYSn`{Qbee#S0LQfteeG!^C9PgUHY|_Y?%j#R{ zwi8#n`ZiFDD+gm4iU?xx`|MfcT6Qe91j!a%S&-)^6y&i>AXK2k7pE?I)}kV)WpL+_ z3u7I@USGBxq9fAGJ7D(Azj!tqUk&5t_TM<-DGVY8yyj;YCyva;qX5nv!a%|;fLz+> zvCEPG_MROl-qgkN9?$>Q_UHdc8~*=Cn;_!9d4C&ZMHu7ImW^=yNP?Tb2!JMr8u&bM z@wAZ>hc8CDPF?Kp@(Tua<`M)G{PmTMpeGj*MfPp%UoM$2je%c(>`ie!4df_11Q17@ 
zxh_n_&q>G^b<9*Qw;BBgUFmZPo7P|9Wi`l=BkOO<$=e3KGV7!B&MGeK94xe>A z?B9dvM^_$&=Ob>U?(qb;0kuUZ$VlYmK^s`#C&xjrcn{M8VAFGSEmTRP2nR`XHlf)VW&7t$;~l0Ce+D6a#_ z#=zRkdkYBa7W5&;KihG&-J@aX^pNI8d)7(yem)A^h)|htybq^L0l0gWj`sRcTksO* z2w`?@1Iw=`)%EG9d59wBZuX0LK)7UjVJmh?gnPx6dc{lqHoG$)v>~WtkJki2x-ItP zrbVmdg(5_g(!wL91|cu;wBErnAC5AG#DEYkTAxcaG5jV>wwe7*1w9n|e~Cw#nl9}v zJQ=X24QOKN#Ma1r0+K~lQy;brR)O5?8X)!l20=SGuZ;-H!R!%DFBi8KR7m+VRayRW zBtLP#D!Gosa&MLi=4Mwn1msmC-c0|tN3&p#=)Q+X$!EqoV-i?2@-ROZni|%~Q>H0M z`_kiSPlretd&NtLemE_3MAL9!Y7K%V#A=ST`)SH~lCF+_251mINUWbTvcc!a8bf~E=bG%^<2Zpfx@(;_$9p|6 z*fH}sjx_6Ycu|rpf3TLi*i(0Zv^>2#77C#>sb7#Ed^h8#v5D$e%IZ@@TTK?cm>aq- zb&H#_z5o|klZd>Nb;=2Gs_($` zQiy|6%*MDnu{X9U6qNz>bXg$4qcA=pkJK?;N zOJaD8{3;K9uqYI6e)sW7CUS!PFECY3gn#$U5@2LKB=~;>rn=tB2OP*>)SNfV;FV5O zFA0neaH^6}-q~0N-&qD_XgeJFn+?Prz^P zG74uccNrRhOKdj&tFM&byVrR;f!t1aZf;jr(pUJH5IqvvxNbc8b$qi9>AgM$&Kuxm zG<0}1Jba(~Izinh?2I`<+PB?>It{|-9$h zfMVM=pS8F-5=W5ZeB|!@#8b#BZMG@ zp}r$6%d=U<`U|Vhw=K%;ZrbfWv-+fA;~d1^HP>cwTeUf|Kn^f}in_Nf-1Sy=xMzrE z{D;}UFY54Cy~4VcuY3T z?^py(`WM0W1-gz?vb7T&D>_oB&BIsg@$HT_)(!S0gj+Vs17F;-+5`Vil^sZG*DP0^ z@_}rJJZ_w4gDi{fW68YhYMMC{G7L~gjG+FxtR?qO&`=4vK1S6pbSL~iKshDF2Ogl~=LPoO9GKDSq^w$B#*kG1~ zpd2$R@{^>x@#O*nGK@8&tOCSz3`LKN(hsQmunzGGAr^!Y71PY#!8l% z;yU@Kwty~~Ttot#s{XVvMYS)oobJTBd?~9y>aPW9FPtKRC6U!EkIu-ILv38ZGPA7z zXJYaE_T=grvsB6&>k63WN~j3S!n4UGU0D_+8FMrnuzPD{BH0E(roVU7TJ=Fy+VZML z6#HWE_TAthyPR^fB$gAE+RahBoeqR%b#ruy2EY=FzhQw{%c#WEWJqdKAUq|t0D*?? 
zJGjt-VEpc(qG5>@UBAG*Pgs(~Txn2Z+SeyFneU&HI!Hi6C(fa!6>DDIjN7DP$;sNV z;PhcwawIV*HQ67Wl1c(eLk9_^W`zrja-flpIXW=JflfZsz{nH}p4s<@O0ZQz>cW>5 zP{WUTH}u!=LX$6H?A|(8qb$k-hyQlU>yjYOmzY;}Oe&as4I}UAnos`vn&Fy% zcXo(?UpM21mwRBBd+z@BY;SSa<7`%4@6i4Bj!)AT;9^n# z;Tf`$6AW4k z7Zq>P?@&<4AGJCdxLFJuSF0$Pss%=X)@mT!|LoQyGYo0~G9RDO<`me|w+50DRUn0; z4{#) zSwk9SR{YpwXRpnJ=f+o`E)E*({-x+%M(Ne#9hN}p_E+vw~r=ch^%31OTWW0wWR}}iUYlCf;Zz7SOp{C z6xWQn0r@zQ4qN~S=blq~)kDg*p-;Zpt_l(LoVX~FCSUL(C@?jb6@f*OTF>M=yLc9S zV~f~IP%RcLQbbw>tT!?Oz-1u%1zr3zykUtR_EH4ByZ;m=)37L(rl|i;qF-_LC@h0A zlw@=0RdP#;i;LDvFtXbvXP#-|&w^-@`~V#UP^%_{_$ICKgiI5V*7j$4ptfZI+?oy! z{KB<2_<5yndVCZr;%sNk6}}ZwSZCtZy+{O!IgSVCEMc@wD=(NM0G-piglH7YlA}bw z8Ee=lt`cUx8XJP-s}?~_JpgGN4YvKS)l$iOsqPO6;zp<3U>qkz$|5+nkKEy^=|y%g za-4;;fW~!5V&(4d>VD(Ej zVcH7^2O(s#m;@(rJmSb(XNQeZM6}jZTSq$4{L{>epXx_D&1Dcm8#f#y+bB+#l=5gd z#JC#%l&b{?E|gIxgoHC_q&cxU@v&-a#B?x%4eUu)wG)+C06CvvRAAhzYf1Jtgh{uN zA6WwBF}5^;&h*L=bE&i{M6+Bn7|062(=y3bN{S^yj0P9wl6-1G$iT_m$8%F-& zFW6QtY;soE4REQ4^9mvxCnHd6as`ouiR{h*aaKi`a*3pOab-=X`D1`5s22uhoW=J+ ziyz!Ygh4I}qF9UkO4rULS?Zf8C%<`I_Cs-O+9soF!L-rH0UZ&O<<+A_BGMJKoq`UV ztvKBW2*uwnRe@b8F8p#!!5r3~DgrJT# z1aS6eu?f^V~0n9 zlWThB9Ds_sKw=7Uy(2ce5J7v?6&+zfxXeO$r((gd=eMSlXSm5lHE==*S8&GJy-Mj1 zWn`}4*q3{e{W_Aca^VlG>z@L%7Q*0}wTgY^hLQ-__Eu};*yIJ}i|NoJ@??lOI*ia6 zN%_vv5>#%7ysikZ73$jgi_M1Us%Maw=nyWL(tuQ?&NSl-3NBSe0w$EWTqz#4T|yXM zysy)U4`cCK#V`Qs4ZUEQ#h{>+dtp9>#-I2oyS=Zs$z8pw%9q7s`LgjFF%c4j2JDKSgBvv63s7KEp{5fA5#3B!*w`$m+( zUGM}IQtE10snw*!>s~1VV?x_~Qwmwg zKLAP7EBSHuq5FKha%YC`gX+`#L$TmkyeQF4uKUBqZS0sa4B{*M5XqAos0)zvF#yRI zqA-QP|0kJ(JEL=4&jjy^M-6eYM1gto$Mv{4bt5w(>WztjzJ&x^>W!_y99_G}9&j9l zLRyW2%=A)zol?+-xrh)1CRa}<1;{6t2%$$G*#3Gvc<^k2fC?Qn|4~W`#M(eB<$#&d zta?Ouq`SV*#`8<~Rqk>TMhT60HQ;t1Zs-?L1x1f8=v_3?32HU>-$UVr zep(Lw%Ctb&s8t|(y_h(X#*JIoA(G#gUC7!-6SD?=ririGJk}>vGEb;QkboVXOt5w2 zFrC|(T-szkHKLNyB!9Dj!x47ReI{@I;Uv2@@7F6g`JQbA>#LKxkdIqapI6T(ZVQ(l zOk!Pn9QajGvm(z4YMiUUdI4EtMNuX$mL%T2f_Y~7H8eUy80(cpbEU@A+s6q3DL&7W zO}*#Y9nJn5pXYI9;tklxPXM1&e_my2mnm^qdlIMAMH#e=6pl(NYHAMjak5#uhZE@_ 
zST0fy*hW`8@3Y`U97UK4GM2C)Uuft8Cibzh1!WwO1`UDmqaiKB)B(qFMfv_LqbZ9! z>MYY?EC@rTTp;b9khpGq4*p_v2TzMqb1-z6rZk`NgjOJhE=2&7Dd4wPAW~6piEK$j zKw&w;iE7ii4@`LtX03xhS4 z-1XtXaqXdVXSK}CFARTg!J^5+fOw;^A0!dbqA5(meut{;5TvLmTGDwUtBers zU+I91SZz%JlVrsb0bo+mC%p(RhSdiCv&H>yxrvJh!fJ>8Xsb}aLs?9P z^iNb&)}gNA1q8nDB>UlDv&mT0-7|w#enDrwDENUvc#~TR(pvG=prp42sdMJ9CD_ADRb&p*e3LO4^G~? z1!mM0A?u`)($RCtB-K1c(k1PHIs3@61_90sWtRk!K8>a@bJ1jhz%a=Vh{9K^DuokX z?)exSP~pZV2`Z+6jo8QPzhY&oJ`Gyl&R!`is}zWoiVVg`mfxbaZoYsq*I}YvIAS=nFC4=A+2w)j_iyC@b zq%RI2IL8h!!zEHdgA~Id1cN{UYgc20uP6Zc9J+2Oqg5hN{X`@kqwqgpoHmK5^%F3(j8yq88#o{ckf?U#y?_l;NRA;Vf1)X1 zi<;66lFjSJOxaSGx7At;A!<`SVUChTE!pLzYx2nn_Kd0@4eee~CstZj6^*B@xaHLr zN$u$kds1}+?ddDTk?e*nEGauFfc{GWP?8yKDNN$39qT(h5YW8pzx|pdR6-O0Xu2{` zNVSv3Qg>CQt@jlsO0|KE?MZRv%?=Z14XSHOEm$=0=kAn;pL{Mtp{tvWC4DD|;i@>o zMR~PH8~xn#s#7wtIL?A*t4MW13YF&PX(9upCA?Q|T<&mojx_fwIvWfC~e)$GQSV(oq|=RnF8WYEWp(C0TH zln112LaU}Lag|7XM4BBZXWaZTBvyc?yHbKGtS^CsNV&6u1Gqz28tdS5r%a$SSxa>c ztjUeaz3+QQnrE<{)SW%I zWR@c<=GYj(9y!oQF#UhG7wU|7x8`WY2|#`w-OB z?bqa1O{A`5cNvEvaR@5hl&WYAA11KY0B*erLPvc2P#4e<{znv1v)sL;M^~RYIVh%= zy3B}ez4bV23t;H8jmZIEN%(%TL3+CCFw6F*$M3+?rA~Zr%(#HHLBjcnA_#BYPq2c+ z82{`$;}ApGwjvpxT{QPW&og0+7KFH?jT*6o(#F^aN~S559k->sQ8Iu51>es|)Jx(I z_cs9r*~daM_$f)u-{9J{DCmYR!~^U2_J#Lb8rIG6V8}7~!x$FOZ3qPTK9=*p4-42j zV{;<^rP(fi!c=#AMgZ}7~e`}wD>X;pIRh3f4o=u7s&&J~0G=RW4+;a(KEmy`e1$I*{D?W=e^N7r(q>77t@ zzjnz-V+w#(z4YZ^zMb>a(&&VOPu9)vK?mZm#d#bIp!pyUVfpY7%%`B1;b8`9+_F;B zW>wMnZ?T!LqhB3gM$KvygQ9WHc5loMnR$>UvMxZz{{3gtj~j}?B113s%N+c?ysauq zHv6XcdEKTX?-%mhd{c`1u;uf+Fq_s;nB9FC^{UrPCRTppDgNq}&8&50h?=jU#-5Mh zh&abP`<1G+le4swz7I5}92-~)rx=oPyLPR#N6zoe#bIUkoX>}JSv|iu&MfTK>n?b-4XI-j<)ajnnL~e}3ovdU-UW!Z**FHo5bzfB!35 zKIKQE>PJZ4;@h4Sa%a^5&3@e*F=yKJD%=VfVSV!z6NQjAUktP9*4QpMuY{?TN)1Xm zOZ+&wc~>V01N^a;)mI&|N=!>3i{jF{-exG>;`uBblYCo_PkNvsqBvU>kXv8?WEbnd za7gyvIwrPWRvg-ZR@-_GirHd)qkllmR`MI`{bII;-`MLDvkmwL?>7MF#zpLt4T@Rw zW2b%O4D43UC^#*z;V~PyLt?f&hDGeF^@>^gYvt2aHcQ7y-_4NU&4J&|-*WT}ifrh= 
z14PXFc18nd!@lDI|7mbn4c6r~{RH=5xp|U@s$5PHdG*;h#O~1uEiG1aP3Q1!o~f1f z9M0jV#U?-M8hZfxpQSp4rlO0Y@zH)0u1vpdgl+vg@ig&Ixvfa;8?m2h;)*0z<6rm$ zZG~SVHI#|`Uu6LF6Mf$~pc8jL`k)6Jg(*?|cJZ57v827LtM|=BtfPBN_Iv1-+R&d? zg`#Y&>UiS9oSk z_&IAA-g1SnxMb8z?8MLch>e}r0$^=_)mRcW&G?B95|pzH%BMJ+sY3N$`qm#tWH^CG zib!2@NL}*lPosH1tB&Gmd};YI>+s_;&x4K~tLBrrg7$T zmjMu}z@mVno~#!H*JksF{Ir^%3vmEhCdFx=9n2u3eF6IpFn{hUO9yc_@T!bM?vWsq z)zaiWMJQr}^0|2fmmpL)ZIU0*s)o8FWaeT$5YUezl)^?z&7VN9YHeeJI_mWunAn|? ztlA~74MkLlA}UapB3{76vHF15gRR8eD#(S44M9L6I!&?kObbx7#?L1Jom{KTVhLbQ6pS{(nxEAfOxwsv$zlJU-VgZeYE*w#qAx+R&Br#?M z;sRzjG$V0Fx1${um?ZKrn>v>;uQYS2-?2<1oqyOewH59Xay7cvHfoBd)@m^#NGJ>% z{0P8OV_s;(C3|y@WfAq&on8^}=7oV;H&>BlI!*W*rkbN#I_)3_kvR>{aTf_eDWZ5A ziK!MULoZB&A$+@FSOgrk32a2^%ggSDI~UnvK_4dq-_!c~CLFwnxEzJR)dp0DaOccv z{JqzB%7AtY!M_sx+dO~>OX1TrT?ia*`vV|~vFtkR=x;X;8`hweCUaXaR+*;YdXmSg z*+8|~?d>noNZz#)%}5m4X8qzPbW!GachrSYu_CMq4kszxjQHuC9h`zT1Cou&WW&ep}h7WhcZ&E9_ZIQaTo!m{w-oqSUp&A|G-Z1z&>qi=Oti zIl`t!DEhE7i;3iAJ=w6!{MZYnU#``PUFUD;2Xv#*%YcVXHL<1+e>bxFp8A~o1!GiI zJRKSww4WH}z);m!Bkjm?J?$}71_$8Kgj!b&vnpj@^OBkM+(mT|`WK9-@K%t&gnEGu zbq4a?4l07DsQ!3Q5wa6LAE@LA*wP-^+FB^7`i8kQs+ zT`prJ(+E_r44ay?fT5I*RXKio^7z8aQuio${{EvZzwiaD9)Y*RTiV0~;e9dEnu~*jlR`_&m=E+}0zr|=aFE6Ily!Ge4G=MtP z8XngmFgN|J35}$YU?so5{{opkHEgg%`TiSScN5kC-{D8xs#2A=w0lmI6gK?7$91@1 zZsv{rb5K9;%gg|`lOaT{+ydQG;AfPAriZza*O;Bk7#qpx7`0@NhaI(j8sX!XJl&m>6`J-kRMl*9C6<& z-47XC(cYQygrc(+0FcTb!-Bm3uMX`!v^8sKX8^>)hT(T+EWAfvjN!afNBv47rNu2u z2;JRHye#?na|CX*oqEo7?$8QjFPONqE<@*PS2$(!B?O+L#$ATOl0A&*?6>B0Mxl<0 zSHL#JtW6h#^9U19S&tn~-cvpcNlEHCcTgDSPKM{{`^;sEO-$!0X2l$qPv!m0l=7Ur z8KAdpn1e;BfaN9qASYuq@P@W;N+M*&VEZH|gX@e-dgJ%(uIT7DLnt`R%@Q)`jy?!) 
zR4im(&wM*0gLIn{aMOi^SElmP>6kY#cauKwn>$FqKv^P$#yKjaxu0Qb31%1LMF@Z1 zkvj6rJj(8q@_t!$ppopsH{j+GDMY0nP+P0bFXc;gL}EGkr$9Ed>gI39)*r=l*b00} z9JM%lRo)J}N6N<#)VN~!s4(`3+DSV6lAidDgl5C5Q>B)oiJHF@W#75cSy6 z4xg(q>|h7dV27eaNY7IR4sW&HP`w?hQbC>Xx!8{?9HwfV5S3HwxDFz(QoCTar*f!5 z@guLY56~8ANKec)x-bQcsduojWr%H3@%D|N0RNh9th_^rf zPxkYF^5{u!w5c##i!RVd-)VGikN?6l0S9nE_M&8579%RqZ|{=VAE`G6t@TtUtdiKS zcB;#6GqFtlFe6KD?5dwPzT}h9$}($xA&K4lMne1E6v&3U_$oPTUy-7!I&#fk}-7d zyT{OUL1(Qvg7H`p!FXgz(YRTm3IZmTMW~Uq$ScyDE3EmhBJ^W>EvED;@^zBzVy{jd zp3+00AuX4ITrpIV@rhO0I#ylx_-7^=%J0M7;$Y@J@GN}2QU9gTN8SjN?z3+e86*TL zJ0+iA`G;P&HvBP;o3pmGqY4q-ewYR$-dM1MLM|uiV!VFXk zbvx1tkb3bHo{bu`jgov{VdjfkeK4KX@yD_u`U6f4QY{XoXs&nYh!6LBC+q5D7A8f07z`p_izS}|16cw?^wms)Eal3zV@%?xv88MT~^ z)dvbN8*&LlVI&c0$`fOrgct(UN@Z)^_0|x}ur|{1&fZ3YR2=9Oa+1qxwKjA|3hu0wM z)mbuo``G73Ht+aC6EG$07l;uSo~~%5{z!!}Q%%d&2F4fuGN(Gnv1EU?-__Fx@O6Lq zcy@i=t@8im$45AJxPN+{eE#{|CV#MZyea>jwzu+mrEjd?$qOKQz+M%oW{m}#PO!oGH?*nnmbb8PE@@u`31MiK8GjD36 z=rLF=YK4D}OQ+iXdQqSBloYz>^Y!uK!o4n`5&~R+Ej1$oNe6-rl*DW*6OJuV(=eH(8(x3v=sG?@6o^^_$ol z>GDR4qeTE#4__s@_G2W$G6BQ909@soNtMiEk8g;Nj#5B0N{7b7=hDA^$D zY&`Vb$SLo4=Wo$~`?6thpa@9tTE$Ql1`In zl~BP;&mbS?l) z&BcKs8its!8b)^6Dih`tDg>F$=c+Z=kNXS**dtL%CL|Dz#4J>WJ(1g_H2#Gmqg39n zB;gG^K}2juqnz#^L9G%WM?T7J`6lnFy(y*3>ZF6-6VS(6LKq*6ggJQ$AxBW4M!r-46c}3v? zS3(V-ZA6cj6%JPFX2n)m0wXA4MHH_pLGoCd`fxZEVWAQNO2!73>fBa1CsC0K@=3lT z4YiX_bNw8p1!>rTguP5$?=uwyfNJioB zo0sdGxBGfE7m!VvwM4IBz6Ek=k=ymcc>!HYMFX063WjiBqzl5`rpID5@? 
z{V-^C)i{#FB;m{%oXR4X-AEY=P=fc*$@%_;=XkNP;(91q)L_3Pn3r&ZwU6xY;gwQl zfs)QkrirdLn636@&Wo?Jw>FeUiB}nAM(GNR%1V*=V&(W}Zrd$=-(y%2@oCJ$3GMDpoGXyR-?x7CDF0yX6VvCMD1S>eT;v7)^yl1ZiG!H5z~y8zitKA|gQMT+l{ zEzH^6bZFvuALxo?vYBKkA{;XwTvu0VG!lKB@EM{Rlb70X~W15YTWqDW*t88gFQ2wVY z(0SOh36xJoS4I=Oz;?FZPb}Eqg*M4-AYdD?U>i5&3Iqj!(g?ZyZ{3z)AT1=~PLV}W zAd(Lt0Z*$)<^L^O{`aCSLVN!FF$~|Z){`S@CnHxf9~^!TIdqaY!h6{e(dM$Qw9nktNoUz`BSEc2|srx zYz^rRm@jB&+_!yqIHU;%`F3Y_t+VNo?X;`bz4{~{RK&P{3kqb>duy(iUJB$WGXMi* zRmXky?MN3SxTe%}ptD@oX~1@JU-j-&GiM;W3hdvm=%EG7Qv;P-!*KBe#U^{JI=SDn z>R&#cg4C?raoNs7U5HF+nt8qm1NJQn@boQ&P6CegUAZQUlz_IY=Dff`*CVl}|h(Z7eept8~f0N^Gq{`CYlf({cXS z$O^EUL30rNUwQ zfeTwfU$0_8^qjF!UybQ^_Y(ap(rP^ZcGG+e*S^{ExxP~uJYby3;66Y$-ox+NaAZ|> zQdCAp>SR?x6{y=98K|lCn?uHer3xHm7*uq$eg1B3@owcJ&d#8rV5KrU2`H||F}u4J z#hWFI?|-)s{@>hCuIB2093KKHYl)n${1~@B*W*-F=9CnH&^@WkIhE^qXz;+YX*H3b zXT1_F9T9-hyiBxQ;d}`mTCnVxL8T^Xr(!<4dpiocL~c7C(Z%X-e50IKnV9pF7$SLd z36tC$W|O{CFrUcrL&}}{ZeB*lHc4)VW`2^T^zOqGt)dyP7PGd{KvuSnm;Qr5!p;!9=>IW znZHUw+n%knEU;Zr(PXfN%+ZoHmzdAaH8o{rlp{V|77kgs1PMjdUSy3$yG+-rsxbqs zVFEnO>QZvoEegt}w4X3EnMC>KHTo2k1tIbK6sx~dCkPw%{zyr%w+Xh zrTH0m8zZL1j=RPaG^L?_BJCWmS)9TC$gD9z2q4&={LLeg4E|SLb95aY`(2=UPzvzj9zW8@6_zd|5e#nN5!=){o?K}gS$Hn z?t$R0!69gHCoouWA0#-zAvnRE-~V?%!1J+TFEl zcU5(@*Fo1A%ss_#Us6521PBqN?#*efi~t4oog4}Z;#Y$5cHnS>gydVf{f-3wssGMt znIG#}%=ASPt0q3qm~}DLtVKS{hB3jaKLh$fLGjC&EH!$R!ZE?aiEvU9UDtACn&+y+ zmqeQ%q$eAzXF%_!A&o8Ki!4r(jPaCrBp(M@soY62y2M{q=Gbn-Ou>PQVm2q=Y_7Z) zcsvW2WKU&!27c8e>JHxhB#3MNqp3n9X9I0V*CCGSk=sl}8pil;W@HagK$|9`Z`abe zTFWg{S|)f|e~U*?ys6sXj$iK(2S*G}jwsr+BGF#~i5JBbxS*7?12FpsdJmlE^-dKc zNLG`;i685;l>6{c8x}y8Qs(WIj;8PgQT8g`cUD<|wYpWlWx~e~St8L}*3_%k7>Ky9 zWF7k|@~C-X)wI|pUn1*Nd6UG0!bkPLNEM^&t*wNDE1wk+#9%(&{Zwb3|-B#dTKn&0x^7KZ}S3mc$qjn+=?RMve@RgYa2hjsEz z`Vq9Mc|rIT?;c&2y-=7o=3=*XsifJRTRIsu0Q&Loum@S!ih=V3q4;WuRU*ssKF1w8 zz3q{+l5^(UFqTtRsDX5xr_tqplUcp{Iso zhoYZMOsjv=oCkH%1{Sz3If(cWdTwYwCe)66i#uNaM_0OA4(4)Z>J zKNpM4HlRiVp+>Gk4%KvxFI=Q%SLNF98_K!>gIQ7#KJJ8})#pf@*mQ(90Ak&V!RXqL 
z0Qfk~OeCT=Jhl0AjUxq-BJTT52(H^^@Z;np^5@CS@~rW!T58Q#L8Bh-d6* z$32Nh(Y5JYE&S_^EE-q`L%I=VDVw1_S(H@{ zoBL$%Zvegh)cpYlX z?Mhk3U?G|w4Z7%7Ph`p;U;SnUNsh$8`BbrcZ=BDyuCu-%pv7hJI$>;l=;pP>s+B4X z|MH9p0?R%%%oKFBzj6vCSD8DkLM*I*36z&}{`sb1)6jEEuEObOzw zXDa?UdbK(d9tQOpWFZADjgOoQ%n)kAybCbktSCR!J< ze^955YP4>$!=$8=Fvzq6xXP{K5ExZjJS3}yBYJ*g*P>GN5d7lNl~e1vMgjMXsA3a` z!PnB276{cLTl#Pq!Fgo8SWCItoh=In(bv{}Gw8JrIbFk%%R;@r#9ZI}Hh%mnr13Dr zksk(3s=hcsVsG<{5%+jwg*?MaMR_=!e(xwYdsW?ct&-n0&_bZxu2T% zXwPra-hD^wOl{o)nVNVvjWtGvR zJmb&vwhxs0~Zt&-v6i^8#7lcOHFrITSx0Znx~L-*^#v6szU;zcu<;A zwK`1*8wsNv7?wtA&6r*hN}6)=B=@}yD@fo`S;5@b{rNROQM`0*QhGdFWIJYBgy`&a zrJ5Gg|9SKVNFt(1rQgdD=C|DT6?#V!2gKdkN)({C5~C6hn~p=C#JF*3ek-+enl>u_ za`0xv9jf__G1|Os>z$9a9D+RwZcDZUV|a=cwRuu+wRr=LKyZ>;6_fIojWx*Y#+!jY z1`>0bbarX(?fsq?V43AVwom&~2YS!lE|T{1T%_ZD-l(m2C)haWq!jaRP#-5@2=pwX z`dZ;c_WYZ-#PQAzExhdOOUa9j5`B3eA#L%6By}@xVmYnCCM|`!{O@nti>P3U+c^c4 zhAu@T%jm>*Ad_S$EG%MzNjKxt*7w$CWy81fvS=Urf zaZ9EclNZWfKMVz?_4V*xL_or&SR_dA)I7M|N)gLNl3Jo9Rk%7Nlz|Ji>Lcw2&jyp; zN};o8E>&)Fo&{6SYyR0a%8xp@)RdhRNki17AN}8AE3%oDP;M#MXvXQ=DB`>gGS0Hj zRHkNiYgaTE!kj}sMT@57q02o_kjgKebd&hTl*Vc`L2%Qy!)=~rJcUH?fle6oahPhZ zBXGu)3!7uU2&PyMX{e1BSLfS7H#ul1rG1w=GM^|i+Ft&`W}wjO(*s{J_(uOR9mT;1 z5C$U>#TK-&5Oq!qW}fF!tP^~4WWEZEbdQDqa_`yHo=)h-18hKs2G&0vBohY09bS5)0&4005{@Vc0Wf9^J%egl73wnLo%^Sxc#_X-8qGn z9{^tmh1VZ^42@$Bzq9(q`%Dx(KY^7`bcV7SWDfU>=?gbT^M_9rHUXg^Deeg|Rch)= z?-_fnvIMwtBPE=+EZ(UpOX#}tZ>IM30$$PfZOw&R5^y~ei1&OJz3j2mz_2hD5ByAt zv{8%n&8kdG^^Bm*wS@Ii1qt$vcQ@K- zQ7(*a(m0T2k~<4%73MWT?2{9T;*ib5vw%npV3HObr8K9ZowK?#YA% z0GfO`p-)~C>XTd9peI!z1O`vQZT8#X!gH3fmO zZzu;vLUD7O^A&-Kp#sF=bHUsqBuv1;%`+qg@t^>7Qf$DJrMsdx0^$w&M;w8HUk5fY z^Jq*R*{asVQ7(w}_3Gm`VjcQecRK_sn;1P9$$Oh63H_5-AT9MCOM9py%a(6m5{&CV zeoS99l|%|bhs%_{4G%dcRBEG&$CF%dezQ9)IT<0eAk03imd7~#6r9|Nm5#W+W>U|& z_BLMES(|1fx!f^GCZfO|LlGxAd~qHOoSai9#Ij4{pfhx77@iMR%RyXHiQ2PI_>zOF z5X@KOgX*`44pMp4OK*g~DXh-QGIMq|w+PhL3pV_@K7+u)4&xj&?%>Tvj?eh`3B$-L z+tCWM0TSD1-PgldR8Ybc>+;mRKl&Rj-lJiF{(SO@@|Fh_UyY>m=Ha+j~? 
ztb|)28~k>_xQB0a zlxQXl?*lAm`L{@n1L^^^PzHY+mx1fy*V+>9LHq0O<*P={8jgMNLX|kpq+iM^n&z_S z8dRdAGBzuiOg9qmaZu%$3W^gN7<3YMs6B-qE8K5kt_|54KNGBuRdI4LyiV?aKe2+b zom*)n1$w@N3K#1Wr`X7iADb|9@IPx*pS65?!li5#GJg%nGxjo_Wjt@`i*JP0;B*H! z8E1fmmEEtjIAEA}$v!w|OgbwzdD^j$7SjA;E`(3Ib zghpbfOrNJ#W$ezIY-qiYS3=k&UC>86U$Wg~UGKzJ@#GXB>Lx2(8aBA;@y!tQ28b*D zh^2V;{qiuq<{Vv)Z4B>eBaIKMCSJt1Lh|`&1jUYGLqPGi9NpdB@?zR=r++l8SG_qo z07L=%2rM~Z$lZ3zr@h^T{b zDm;W_;E)H1hB0&7m>Ci|6+XIlpqRd60R;GMnPO<-dk|cQ&9H6xe&Y(SO_~UA+V;M)pd7gQzH5iCYT_Boo zi4kV_iog>Q_(VeXL=x&>1}gSbu%@9hd>Utd6|!Z$lKXu0?A3V--MuyhH0kqLtZMH` z1Q7Yz#4~E+S=MCa7I@`^pUMPkIl~3sia$m2Qy@{w#O^^o9W-M5CuhbZ#Gvt=*AGIC z43=i_J`ts%td13Mfqi8~mQKjAFhDtrvn+Z(rKWTfzS3t*9LKX0b!26g=>j|eH=a;i zH<`C}^|yyYzoL`q`6_%D>KphcUUfA*$g+?pGSK*LhB>n_xT;*aiauO8#@?==kxq-G z(Fmj4i94O0jhZx@`gR($DDC#uxY~_lu!VJO;d$~M4+4yatW+SBRFo z%akrndb?rykcQIv-$jg*W*14;dY2hznpW9CmSf2xyjt=u44xgnXUjE&&b^GCXCv?# zyl5J@QVghqk>ib9nbT~Jw^IyJ)XhVmL0BtoxUQZXlcE|DiJq+=RY*NVsvO=VggRtz z4pJLA>6z3;@GUWUt;6hZa}0{e^eXGG{m~nYE3a;$Y}20L=|=-vftM*cAxtHVsy7ui zn)KIWPjce)s^}sx=jWh8VPb=!q!;0AVq8$SGdoPzI9|_OT=m}A))+0GayakbXefDm zjG7w!SvJ4Pk>G8SVVQPS$>9=zn#$^c*|&8%DiPQZ*3j7`)W&XD`<1x4u;TM|8tS%t zo7+<^h|iN7=b+yS&M+jqZcP^<@1rA-Xi6F=@RSQFSiuDaRFT-D@|6=!;za&YdRzE) z8?2Pg0XvedE)m6gkt6Apkh%lbgI}=cr)gUDEY{`o3vkD(KU@7>AvITR_#D4!g!Caz zJoyid2`@}jnMA)VTyQup^3T@g%wyECQV;p!kNsLG(c!Q8#Dy5l^5Of7mEH(TGo#um zkM47ac>#vsX=UvGNLfSM*HNlJ5l!~*f_4M$wB9*+l7tcMGku8vpjTbt95tBEj5R8M z*t6s&JN~Ibn_$Hw{#snrHL^m>_;mxhfzB&JQLn_NYoa^3@0DVc{;=RgIXsM4_hdMK z=l5Uhzu8@_9NhlF_5A-2*Rk)qvBGEM$VJ=9a!$y*BdBkH4D^@`I13(40?ULM>PHDm zT+wBC1ejpE0C$yM8Pq7{^k&a6Edt}2jMOPBv=d;Gedgb|UJDk?a$S8C5Js0qyG-sK zl&a9x0rqbx;YV1}UYQ=ZQ14AHF{A!!9o+qYavj>27`62GUe5eiFDt-GLV7v)g>(DY zw?=sE0|yT3%knGO?~6ZwtvkN`C{qhMk>_w^@;<{$N=TEdTlF6U|Erf%{_rbHF7j`F zy?}Io_-**Zui8;U%)j_8_`|O}&mVs8wcbM5?fK1af}xhcSO-fN@g)1lw`)F(502=& z&LbWdYd>dhxF$7GD0pg(^*8Y1akVK^gJwb+_M)=B4(RAR&x`8393(1Q7M&}UUrp9= zw(hq?zqg1_1ktW;!X69!9@TKtsn`Wze_e6tWN?RW7lsr5x#!>62l4$j_5Hik|LZ1L 
z8fwW1Ek`4D*gXXvICy$F3-BoAFLZeo5;ozrd!xJ_8l_ou=-M5AuaWq1p=adJP^ZSk z_;EcU02%6{UI7Y97;&^U@2ac$Ie2|4rMU-Ki#7BBG39sI>b>&ygqhqf$CqW_jEwCK z5Wb1EKLk0i^4{nL@Z=eBj#as-d9IRtd;WN|+qGDBVv$#ZC-&C7qXuS{g1%hB1VICF zE{E9_rKxcQ#`s$AsYH%FF=d1W5>+?ayI&D*eU~jN^PJe%kGS+_#z}on#NP;!YGz0l zgx?Vy5F_V1D|a(XGk3E;2@jG+A^B*rWN+Z_G2w2wcbb&}(i*0;r_Aud^ysZ*VCiT^ z3YgbL{87(-avH}%!&NM{x6N)R#5w|g){CeEmci9`?{!?mj1jCJt9I9N`1_OUlY3Zd zdMw5;Ki(=|oPnfwI75QbV&LAF`((N`;ZP3~kQM6hPfo{SOi31V|B^YR4pv|@_tbfl zyi??CDw2z*<3~CxOl)n5#fc!%LKP zjVO-Nk~@)Cp**Opp594G$-;!->%_7tg<(O7fk9|QxO8VL(G3F1H6T zP*9=}BliDT6Wq$}Ih&bAXr*SayDgD@SB5fn_w3zqyB`2}va zDpBX!ZfCf{3=r7-HMfo$=1&`Rm13tgC1A@)DXlSK1L9m(nuTl`m~ZU0s3jd+-&mBk z;Krt&5sQbudR@hQ^0|nnyLLlLRw4X8)_~7BPC}NJI6s||o-t02agqLr7aKvsxh24a zu);3?yt^@}G35-Zvofyxg11a>Oy@mgH(sMb5wd8MI*2DF>PMln9r>piUGqghR&3qY z%Rna1&u7Jp&+aw5211^Bhe8CriMHL8-`OH+j+qbg zj_KvZ*{2E)G6PP|zXRMR9)~7&SuY16RfXbyG3uu_%DAPZ(ytpvDj6MzMy-szaOaAi zxW9-t?TjEKjNW(lBPi%}va6%tx&>SX!CATh&_I@_@Iz1$SB7mE^doNjxI2LhT#8Q# zR(N&<7_nF+3j}=HT)|><4mK^HoaETK3-)V^DeQIL)4zWre9QG~@WO`wf%gHiE6%8T zehREwa1nM5C04t(x8C!4SgPJ>`qXh@NAeo|4)k<&UnuadGc6UjoMszo10iv;qO2`U zEd_)xQPE1$2s5UJy`*Sw9OXm(Bx$M~FTw!L*{*!o>Nj%|bVJJ_e*3B|Fz3DU-Hw;~ zsG{dpHhp_;O~sl$T1OD;m5h1rJto3;-Ba@xvZ(4#-L|2H z$fcn*b_XIF!vA;C_>=6VUCYR6i4)HsA{tow(@;$GE}NmD(Ar`}O78@q{g;YI&=J1V zxg>ui;Jxaac+Ch~>!C?;ZTYYy*131PBHX(0w(L@3l-cZ#D6bQRHe=8sar-k@b9(qB zj7C4`btfg$=mF7B8+dklr?nVhi((nA*lt#aOMq>E{N=9@x9Scuyf884< zZFP;h(z)bwR&8EwzLXeRK^yEa{YMT;Vx}%96r%bHN|6@D@1Z9otdv$~oCZ4;74;+` z@0i{xJ`uk+%n7o7{iZI8&Br8=$)QEb+&7g5)T{8CBFP}bMD3oX_8V;K5Q~0{WVW&3 z%FHxH&W?WZUbC7UV<6EYg8Q4buusC>zUy`b1GmWXg2QkvK4a=%B@Ae_6O&f2Y`+wa zayftbRFWyN83X>3Q8&1lAsU6L(_<;apY`GOh%HlgmZ;?xQCMVe5Oi$Njzq;By*t}- zD2T*WYOp76<6$3}5j+P=t|*{y{F-ZdyS>;0$Eh?wqr7v^t}T_KISe`x7pba|LW;pz zYJag@^t3k22teN4xE{>B(YMAC__8=Er_#zGB#7*mye}+)+oS`&sdw#Sfcro86x)&2%xB2e7 z2da&E+BU0J6S6TqJU*FGvV(tphdXjaxyNi%q*4IPf5gJc93mFDzD2I%5U~*azl#OW zzct3vzcdC_T`{zVQ~*OLHgh>nob~zWzWln2K;3*iGFS=Ib?*K0wSRpsxNtD 
zVfU((_>6?xyL@uk9hBPXpH_EpX}&5_NLT>5$S7S#)OC~(o*a}<`fuz^MWjeoERtNl z6*6?be2f-yWHFVhS2ottP!NE>31Tv~VJ-kQ2*_Mzl-_k~@$ikgDM@otD{{sb=pnnW zYUha||6Di~q2b3=GQ{e^bHQ^=i%cjFpp?~b9mX1ro}(i0pda75<89d=uL@oiNjDiQ z*gBTQ@%&UY+6rJuGb5CjAQ&`OMKB3O3)Ji4vv(?dM)ofx#}>9)e7~1tRbd!Lh*$;U zu4$L3&BJFlF0_xbmzd9i!)0?{fgO#*tP~OZ@icmAL3?-_lOvH7g)f>ZLa)?*M8$W#$TQfgx{zl3xFE z;Su)a!uV8V898oNB2KgqyFp`|LkNZrdPJ2IF7XRwsJ}q+fBkzy25_z%!fzb`&0Ftm zJ_G%y$oHoRhk~d>0yZ-yg;e<)x52>BtAT~g=wVLxz~*MOFdJjw2s2X&p+YC6RwP3U z@1+q{hpPed?voMv4(67;lcq{u_OSwXN)-l1aU%7V803T-ixI^=<=YBhL{I+p4Bb* zKuP{0Bb|(=LXKW-S_>l{jv7}LV#cEl88)TQ>{kUn-)sb2UzG~XRpxdGPdY_P;y5+`9CBp6JTWp>Q;r0ExZ$U!8B=xj$U(EFWCnJA)dY zZqK~0oPU`<-A%6^-n})dx;dJ?$-HvuXcbq~H$i{T@LsY}f?r}>^KNn3#^Vuo$j39% z4SlW-U$m*UeM(wH2`Moi)YIs+TO)|hbzROy-D5DX`70~&N&+TH&R5iIwB zg}B8KlYC*_mMG6jWdbN$1Y`n2hG5HJTYgqLtm{!;K-y?0m`3=_ldwYYxjyIxF=^SH z(4nRdc6jP*Ko7)O+aM!_N_ke308MLbJbH#{LY*%oB#UB;O0Cq9YBf$v@BaM6nAT?Q)UT+%z$Sd`*v*V20Ips)!` zLuuFYgg{cr9H2Sy$vTx(jjNb$@?%(CtRR)Sg3Uv$wnL{5{bg zlr3!Zm77OfuK=#gKUW#=_wZXqWO}AODAUtn^}-)`c;p)6j1tq4?uObUBSmrLfQGI= z(~CmR%c9_HG3!m2j*nAVv_n*1*mWKk#U&DSm0UQXAC+4I84m4d0eZertZFYi*+pW3$ z5FuUg8dpS&X|S!=`Ds~y%{on*uje7LzcQt+nMTja%4N@#T|uGvJ*A-ZoAEuZm z%q=h7zCbgyL8^-Vt2Ryfspgbz&%-b%fO)a%p4jGHjb5pGYhMGu6J^VDvS@n&s>KOn z$kFDaU_>i>O?F7>5%tO*@`qM6FO1%zXdlQBWWUXZ9lKXs2iVLZB5v&$gwjMDVrRrV z-y}w(l>k27Mc`D#^B3>6uGs>EfhN)ye7r|))8AF!+>r*msr4II4`K1Cc2I+Ixr<)f zW9xC>iiZh89QgnRB2K$tP1W?{q(E4{5!BFGPB z1jtChhlN}|#;@@p9?wZg-bI0H|7b(;d-FY{_D?t)0B`9vJjf_GS}ueTSp&uvT%I6p zR$QI{`t%Rh1S9Idce!C%7lHOD#}!@%)bG9ETy*ZeF#dyBJ?BAX4`h!)AA&vjVup5U z5a+WE;~@+G?O~Wj^Adhd(B}gxwE?7HaKO1Me+}~M5sPCnu>bHL3sHX0@}w`N){Rqh zy9Rja%VP7YGX}P?LFz|rNGs%8#G$jmM#SR$G2r7Lu9>L}Kk7ORQoq-w8T{~O+lxXV zWAs&`Lm-3WC_e-FBh`jLa%WiYl-e5-6gBBY zyHeWLa-rqJO}>@(b;7iyvjfm^^PB3?r9E=?bfJC{H_G(I;~LJbq;9zbTc&vKtt+(| zED8Z*yF!z`K} z4LO3!N>FKBnP&?2yNVrvm)oUxFsEv)owxTgSaFHpUIm6}6T*e{cFqQk&`|iTyEfvh zN`HY0u>B6ZgT!ue$_MNORV}Y#cRp*alnlse*r#xGQ^%n0D6YJ#bbK;9gfX#v$T(TB zGg0c=9+|h^kjzB`-o_ABQ$I^c~jt+ylU4Dn8#b|B^Vbk$TtqN4dUo6 
z+?Q?e=lLQ?S32e+_`@KA2SD>CcJMs+1!4sJ+ba>J1Va5+Ybt0s=eoku`h6P)XwVqM z=#Le8BHu2(?PoWLk5@sP0(B)rM;k1$!s@q-ut~8P*60)u{z9dO0XR;75~+6;TPvf# zgWIz&KuuOgQjkEUvF~7?)NGKiR@;P~PZD?~9dx}u)h;4F`oXFf=&PscYuj}lW3~4c4srY8d_6|y*9X;4inJSJ%b_RE&(pKBL^%j zFC1WbeM`+JM7$ZF4{>Q*RcV}NfAuYQBW(w~ZimNOfIFDNx3uj|`$f^A8FRGxL>vA5 z?aQcNxtybJL3oIgIp^Fwka=Qyxy>#JOleO@7>@Nq`F0-vlY_#7EI&0lv{js{nB3hT zg95UutF%c2#C)1jZ*NZt4btrSy?a*&=7Km;tAOir0}+gVe}dzY*;kl_`T z8y6ll^pSmHsTDb*m*iq*_w%i`IT%~#5W{VUF zrrQuE;RNi=5Ek<$dW!SZ0l=-vG{bKa#j%$#GnAI|t)+N%V|Vp1VkwP9`eOzxKS^5{ zo@YPm(oqDrzQ>=T@*}_NPheT|T)#$Bn#DZI+4R@FHA~*>P|xAlHkR&AQZ-w8m{Id; z|2~OE{h4G1Vbpru-n6+l{Ot3=#gVLU`gzcpuL8Mv$}{S}2Rk$j8^lEVzqe!q%R8z= z6M{1w$w~jSZ5Q&tEaebm`Tu~}yzy~`z;RF6kI{Z)2DF6Z@0k=3|0shK#^G^^r#~(`f5K4coyZk4}Li7(v!Wkd% z*CM=sf&$cjgP{4qlFsZf#YSKkXF7&|21NhG1fA7?g-3%o9Pt4EjAZ)@n{5w1cBTjX z-KF?fH53%5=l?huNdSNM!~Hi%!Rt2&nj3uVLJau3oZX1pLz+ z|JNVGL11Jz37Gs~2#yxK;EK=wPy6Iw*s{<+jr_MEpZh;8b%+0<;N*tS{!dfRUlax- z{}%=g7L{dlQy!h?ufxBFinluXcxX!1xrqDC%KyHRN zE3=wgea(mj*Q-$OcPY)(Vvj~w#oWb7HBi2c2-2<_A|`Q#sPyJ(mA(E%R)mIZMY&LG z->rykp8ltRY3QM85lks~5@4}m4&@kwPBB}qn9v2EC;tHE0o#i2B@$tc<6CFN-c4qF z&|O(ei}s?jDy)%n6C`Ht^Flg6f+E+_I*MYBJyReV9nC)%Gmcw*&?JaSm$;rxOzL7! 
z=J1eX#zw7B#LRGRMdE_&P8|C=b*O%2@GbsKV!^-xCJZ1%*?Q7A3jhdd(FC*8u2SUW4nm-W9z_@=sS*%_q5al zPAlX?OUUDZ%}>*rO%X%C^K`pjv_R*E#`3zK0wx2kb>1xmI~)A>cZP=TGQ{y37QH*l z8&9f)ykTI0Vf1`aimwmLY=awH2x+`)TptQ!A7J$;U(CT_rtXA@TfspXLt5wpnDFE*d zt$W+115Xj4AdPtGK}si6O{Q8?;ecVd&LXzcT^$vX9u=NdM|!Fe6T#9lsZ|%e%_`{f zab6*+iJmR@2_>AHhEj{ldm{fN5m0Z4`JNXezp~4Jt;9HKmC<;;!(tVwC6<|8M$9TJr5SR)+228=u^}D#ZX3C zR8$r{^GeGP5y|JRY@Z_8mlr_GLABPB|M6W$L4s}y>k*b-Qn)U@Sp-9j^mlEmj;-m_ z;`L}*sVdqmf)-|b-YDQpshZc*mHA+_BxaZG}o zZdkL~VaMXE6EeQ=01$6*^FYF)4AwFZ2|?i)S@;K^Iv3%e>xhoM!KQYsom?R?g)bLr zwioF?HLyCM#G0>VSv{;y2{bdWY)+DalB%^G9!iKjI%Rzg;h*N#2?M}xO4&M7wk*Q@b0gwZ0N2# zi~2Pl?J0Wwis=}<_yx6aea0}vibCj(${~^XY_CoDmkIdJ2F_qdCe&uj zQ|9}%SR4yTxLfo-rkJ0xXj}|ELl#myakzOHJAkx&eUe{8O#G*38tS}2*z|m1X2`Eh zLbaCxuP%G!skUbh z{XJ5CeHZU~Q(Ao+clIk35akOxjEH6+kpAY)3ZuH?xrk|-RLlIh(GIOC!lc0lI znbt{Rg}JTr>#m3UVmm`NPN#$-N?Om3MvTKPbe|(9s!IDQPbY-Nw{|4QtArs;^){5O z%h_6kpGpC=hcV|sVG|s%s|tA+kdMO5YDQ5FT~lv1T`Qg5^;dV@3ooZj9w6DWYhzVN zu34B(y0-#*Fo(k|e7NDDXm{PC_Y&~32MIiq;tVaOk@jYwoLFqA446DkPt4>yLy^Q%of>vn>W9))V=RgsM^vE^ngU&tII02EIa+2_qEXPDapQ_K3tu?$d3yW z$({!0vL?jXk%SxXr>`T>rtCA`Z%Bsiu@*ka!r+?`Wbu`flH;1}UkMCJ2& z!+(E=xJRRu-RYMGLgyo}H=%>0lQyxdjp)zPT=b?QTjT;^L#kKHkcIn zcI=~m@Sn1I5@w8IGPvYXMGl=_uuvF3eSD&wkAFo40392=rhWzS}`s+t<9QRW-df*8{EXFDXZVKP_($sWxV+m#?SyT%9=EJMdxsLaQ2+ zZhE~q07UHETP%_1yESIOgk0HK=O)eJ`e`r|6d_ON*)bQ?<_Z(a-DLmM0CJF1N{v zjhI1(Oj)e`;VcHk&B}wl>oc&JEi@(CpL&VG{O1H^Q3`9@on1uZCqs6lcQNAUU`9XX@!vj3-sJCWms6(- zuCR|V)^hCBBc408DzlrM%h9&P+JzL~!o;~o6c>~4%pSTkf33b@%%DpVUR0GiK1^BO z;Q&t5Zpzlw@GmLXDoxN?ovhkEbZm~>JoL_w@5*Lh?MfEL>oRU?hkxVm0Td9;lzk!+ zZtiBSnxAs#khr2gW{a{H+nzt>tzHxWCkxNXl?yhFm00su@A44|I$ z=#%e`vnGpm*40T=xLn?&j@Q&ipR*j1j1|$!DfFKTwrB63Q=6rup%~O`5?8}d!K$^` z=l5%Mt`;!-*Qa-aPMkXEBZ{k^>ehZP1i8+exF@ua(BQc`OKbKU5sxX)iG%kWgWY!6-`+hg2Qw z$fntslf!V5FA8s+R2o>LFEu(3PysNxvX!OWmu38~FHI^GXf@Y%tA08N1rrRBx-FVO z)*rtw3M2P0IVzr1?DIn8CdnhqroY?D+{HLsQpjkKl)qwH(_k6LmzEyNm?i`)Zl0_Q 
zck1!+R_U=)8_yI(KFps#@Hd^ze@X!Wm7Dd5Cy%FX1#~<>r+T=s?$ebgVz*!Ki`iIAQAfb_3-=j~Dws4CvR4djoLG7-7!|B!O-2GW*yTN4Obi zzb_`nCV*)GTpfTCGE|t@C1o#4P{0#mdoCRCl zjtGr>$rPQaF`H}GBz&t{!e39V9HPn}CUW@R(nUppvEUvmNbssPBi}KvQj;d6yy-c$?ESOPITFDi)0ZeZ-m$yS>8M1gJdBH;PEhpH}Z!S&BNA1dTnb+ zo`mCKgVwK+n~Ibo+)+C%-o1sMov`T06oRi27oaYCfsmpnAQpgqr*bsyBUMT`qoT&= zS>}TPMv@BY!skLAhqvDfcBqqHx*#@dx=02C8kS;~V9G3MZqL$$SdLX718KOU1;s3y zrwWGUzxg>w+K-yb^MV{+_wvzwHms3W`)kpFkW#lql9LI{OmH)g8oACm@DF?#eYWgi zX@4jXN0dj(ibNFdwa`MkxBz-v=Y9@cWMtm+K|f=vIEOh*$vv%J_0+Fb1hp762&WQ& zi|^D44*(Q(1gV5h-8%+&a|?g472q0d4~@NtDJZ``os7`TmjuP^*E;Mp z_@-Ra(>4t@D0niAjvd0(pqWlwizpbXhQ8P{mgyPpYW-QzIVARdt_}*xARt*lA*z=t z6c&aH^4lEVdJY(FUZDl7_0syXAk!=;ykHcGd`fHOBo`LH%n|lQpXfSYPA}lbfiOuR z-JdZO%OP_F%ZkKyS4O$O4IFYzg2-IWOa_WYWTHSGwNs;w;WS123I3ujm6}(uWnX9o zigP5gIT^$M@lh;Y=v%0`pZ9hUtFIJjQMZ07bu8*IXm6lq)6kfS%=>w1-{JJSsfbUXQAH~nl$MXHVj%3L0&Xqg3%i73wyZ%( z!MzQkYSCO{nt~5HgEkgDoaFUV}_$QYQ%PCo}XDP=j1_wE<2;l|}%?qjt6 zsm%qr@*@NjPY%XyO0EK$11S`IkS2ja$@%`5aJ11TW+qJnet;u|aGYV1ctc@aA6?98 zDN>@8Zu9+&lL$HN5K-{KZ@`~6g?gtxn2L}md;?xEIeQz5+EX?(;O14IB{P!n*u`hV z87Z5e4f`r@20j=rSt)fxTyZsNKLp{lN zci&s|dLE&bknEFoG#E%FUbcVj7!)^nk!9= ziCah;JS-KTh<9M@^C_WXFGUlPgmYxO7bB;~_gnNFFsw-Ac5`wWW55gG^?6f?YsmqU z`EGc*m2rJMh_P8l1mgE|Se&oz^w0t-VV|3F1jfxky=1xd&zFTh6~r2XK65P=vL-SP zfF*h&X~Uf;LdF)z5jG`9T``}h*W2|}Tce#S^`XB7&I$LBP}C+u3N^lw668|IAE(%_ z=(r)Rmo|#%)Ifi3(#!+VBIn3@Y{llqCq^HT<2R5zSadf*=g5O2%UrYF;HicYWGANX zEO^4ziP)w1f#GJpaTlEnOa+URls-*(Ky0)F=I4SF&Q7y)>~Jp4l@Y}1@8{41#j?V* zU6P?iU_t?}=)d&agb%afl!iwE7LO;U?wcy-O0CU;V99V1+IKT<8ONm_=G>v1F`n6j zh7l88H?)%F71uI2$b+vn9eTBNyTHm|Oo}8AvRV^XrDVEieP^e*Wd*Au2-R8uOr?WS z_vx&{ha;Rr=AqP6kI$MJj}(KZhb?38Svq|sWCsou?6uBemP;3>rrHnuzzPY`Vqm{p_^cbWa_QBz z7Le#2MIdV+gy0(UBW+%<%$lQxmM8h7BaN`*o@>}ox3+*SN9 zZKSsU@qBo{EgyR`@%OHVk(c^AC3IaP_9p(amn-^Co6^_FJ&BL5mM-9%d<>!-bGhmv z7zl_S{pJh21;N*Ve#SxLZ$76h79#)0m5;!#ck zrTwkt``zfN!RbOHNET796S+;t1>9hUg`g8w8UfcUUk2MOthSjJz_r^RFVt{wzLTkAVPBy~RK5g}^3Mp`!mt;Xn5`dGNV4>J< 
z%Vqqem3v{F;UE}|nf+z+mW1WrE};Q^fn)X&dE}Jmrt*zCo%vf;f%b>1;dZ3v(&!(s zO~sJ|fLJ(TRmCoTk`HQ<-guE~Iv(6ZO}{ezgmfENX{$whuaze`^nCURk(#X;+ksOqKg z*uy~t@HjYz2L(SR@=5~6KX&<$NmSicu!JWOfE6qFTUTi-i$}R9!u?Bx-J(0H^q{Xe z`0+|}#pE1*D56JozFp z6mmx&JUdLNrb+9%4%N{nw(Dp2@QTAj?5j0Vh>z&yY+=;btz>Twt*E%H?g2u@vtScds3TkI-Q(9p?WAM%(?c9vqz|m;O znJ9AQ*8i{*4vY*_#tuMi!-4s^qDqW`UGX7DDHVGXS)5$#cXNYA5uSR^=RwKRTyY$M zA-Yj!SYuM*fA1eU=T*4pFZofU*Zge>a~j!7!$u_NVeEu-NB;3s?et$_a~-vWjQpJ} zoz&zQt=h1pEN!hS^AwA^64h^+N#9!OYq;*bA}LtOCvx(qRz^KBDN zeXyOw)#bzF11wW4^MazJCD4DgGeg0y(0lM8AS8;baL?|Y~1ITQSlz%wQNHj&L{XCB$6BNcFFV_zNFk=o;@NOXzI`DSNF z=ew83`_rr2$4)iC)`tgg?a%(%#leN$yIpp0-*|KOJHuP$$7)xVr_a;vU03m9bMoTF zdv;9o*`KTTAcE(pEDw%`5Zn*IX9;}(@Askm_w&V_w{_|3C*J4Vt=ZhkgTozlV+W60 z_iMf}uAIBT1(Zraj>|8Ni&uGoU(Phq)5ow$!{5fz7On|sT>e;R%F_%D*hqp3ynxH zIKNY6tgjS@F1_SrOjEylp%or8+mT>l75x2^;TEN-i~=#j9qt+4uEni`izTXkIOMl4qGE3hfsl%o6b_*OD9(YXB8me!v46!5b7w&P ztT^z2DK-?vDlWzN0z+a878Rxj@kgqk(w}J0Txb+2jaVS47$96RBG|RK=vU_&k+Pf- zN*GjH$b6)4NnX90aSIimO`M<{Ceg(<%WFn+;oxz8+z!vE8A;LzkslqTs7vcQ39AT| zV*#14a)Q?0-g?08F-|jPe?Pi*#Ej6%=vVZoy`SaN$hpvPR)WiVN|7t?+v_k18}8_-Le@?BVs7VPKl4AU8R~O3m8I!O#N;#gtBngLt za$-LunXHcftPB4bF*j_H`3?`xvTDbH+iuIZGIuz;YclplF*j-H&yFb5Pjs+-5n4?r zLB^PH5gWk$q-6ecgDN+@aVnC{;@@Hau;SE78x(Y9j7g zE2Bn66rtFr_e&1&o$Z;0^K0Z%5x+Oj3hqvV3$nJ45`l~r^>zI+J-={ZUT4jodAMRF zL*6Ui4Wd!Gyp~yziPD_8Q|2>#42DDK_6*U`_q!u3*#~RQy@q*8)6pD-IrBDc@6U-V z2Z(EX7hyM^UY-YrsS_L7$~Z)#hVM&$O=wyyoxIdQ33+&Viv56MpP`nj?7Tep!#_{a z?OQnpE*PFpRTa9HPd15p#{;J|lp?Kuirg)mMv2mq;=-j-nsTh@74!8Oi~dBnJ7m|J z7Qf@Hq4-(|X=;3i##rAJxJh@K71qSY1xU!CbF`k#<8=x$sXd2_m^_TAoThY1vGw%Sd1#y_DtG;6M^7ZqaV!62`f4>dsv4nw#l}8Vt>?zxsruwP`nT! 
z)}>&`u9T+DQHVz&Bh*%w2kZhAWTTyxHwpJTNFnN$K=(CYXp=^kKs9S(oE-Xe*ip``5-c22uJ|?X zzWt{g6&=Vs^aN5!i<7o{o!;=+BbC@M{i)N0dgV!1Tc+HW*=@*YBH}e7O(BDyIGX+M zwJ71N^3a@Dzmu}+n>o#3{8D+Ny10m!eo(N+-t-X@?~2sw4g;*)nOWE+Sw=F|v@413k2; zl7f&^mf^$$TUx$Rpbyv!l>iF}+}d72h#Bi+JeYD-w-K{nzt>3OCt3(eyk~7N+k-v{ zem>*xKs${)fT^gEA`(U5lIzQ#hIR$IlSIE%Z_-4=XhjB_yOj2{#Bm7tM9WXiO4Cqg-S6$3accb zY1p_U3e?Tit+c`&BsgDUv|)|yJE2*NER=*4o^Y6&o`mG9VH&rwTqb%e9DJnL@p5hJ z&Qq_0dQ%SI_SL-Je2TVCho-OBmDI$k$|HrT^{L9;I}L0HEH7kwXg|p=;w8VX0a4#f z%;3i!LH5<~jwT*i(d1hCn$h=A!s~P#i0cy>(grBY&AqfnU00Oq18xei!E`8-{pI(3qN##R96< zyi04COi7jKyza0y`4_X1ZWca3rK68@OsayH`O^w-aF&^<9P%yQnACVy({0L@*K+>p zVx{poMST!MTBE-9=8Oo%+}gQM+fPih zvmiT55;U3@c>TXbI(>&{KDhe$i}u2>F9&dfdl)@JW&ubRX0G&8L4X5#Suzg&w9wa0 ze$3Bq_-RHnOguJ-O}R_Qz)YNsNOsyYYd`t70?V+n)o#{j{LOg255r2C(dsjfT8@4F zhn8!0sBlf2_u0!%gYyEZq2hV3X6KTp4&kqs^t=6an5t)RY22mr(;24T&F|udAD;#j z{{?qtqJUp@(-GS;zRc~Sf`D+k{TD{X0c6Z-#-uYMgKRxWy&)xS_UQZx6;i`(QhcZ^ z^Wfn&gH~fKYI3S5dw)PQ0qRmSOiTHW-;Ki)U?Ne@$AM$hy6DeLAyN719ctaL#=pPT z-aATF?0AoJuM11+ch7|E9mq%If9GBW2b%$|I6U-L5M(H~hrK&a)Dr@>-`Dx95@Ahb?`tHndQl(+Mph%4?_h0hA3m zu}i~pzmj1JMZ%tLnihlYxGG-IDRg3iJ@mc2oq?%Un<7d|*fuUZUJ1*b2dmd`YK-5i zW^CSEnGAd3@pfB$?Nn}S6MC2D0pS}pj{zDU1A~*M`<|M~w|4C`bwtCC3{+~l$>>K| z7q!FS48uU4(lVO#y>ftaYYBPPr3MDmRKhJ&hWZI>zIAH~*I{yCzBfCiv9N8!MihG% zlY(z>5MuulPT-EcFYK?X0*oT#4;dOc6chEUx7WPtV$63l2?T^q*Yh?$fDc5g4gNbr zShj(svEA57GGz@|I;!vs8m)5(BtcmG&SVhpfc&gmLgNo<98;gHV$BqD7OC$B>srxN zle4?EsncIUzF$Mz zZkq$$=hNs$IKedy4u(9Gk)|O|?D%InJ?&Y!B;Mus+Bh37#`3odPixA4g+mwCh&fd5 zgX_%ln6H54g~Xe{R-(uvTl+t4h((;V(7HwE**6vJ&iKSA^a=x1a_jg2%=6@t>aNJ3 zHPuJMpju~vFBqk5@s7OLJaN_bQT-6w#;|r$gFna&N0PHNoFZX!Q;vVbc>}iQLlqV! zu;-t&1Ls2dZ5KK+=6{fl_^XJVH_$DX-b^o&o8r;a`2Ip5N#I^J=oU5~)h73NNX<&y z&45w+4HtV-TSvy}gUEgbKtLakOLa}OKF+6mIw{FfiZLNb6m7Gfe(q?>R;+>3?g%FU*WIA3^r8L}%Q_evsAXqQsmm@^S zZAk*X@QcBrDIU@e5e(_$tm?Vty4GEBKgccSQ}HhLj{NEdfCE$~ZKupbLF_}7;L$~! 
z%URjH@fB7QTs-wy_>9y<`o_Y`0&{B>yr{q!_Rke7Fks=ts+g;6!AMPDrLu@?fBm_P zL|V%@{ZAH4m{aMbu6}W|O=9gdGs7iu!$tGta#AruV>?;~d&6E6J*1;ettpU8bt{&U zqRc@kUMG_tz^M^A9#=y@*>~}=36di<5%hfE)(r+?X-;{_=UXh4qt(n+1&|N`gV>(; z=j?TAF2zbQ_Za>G84^H-u&EMsf$pJ2VWN1zaXBI#$L*}6UHOZ&vw2W&+F7q@-YP-R zTq~+J525ymv&gCZCn!SiaWGVe{iZA)n{MHGX1Jm$pw+aN7z5A$<=%eGKBl#7GYOnX z18=`&vmPYeX!UNJ?-pF6&!D)ic0R#S&X-JC-Tx~s$h?)x;;$#m>scPZdzB$&HD+Gex|q>S6sr#9TxvB$UTSXZ8}_z2o(Uv8~AxH(Up#{mNzPl(T| zFZxE|t-o8%Dg8-%<^hZ_>>Mr^fv+Ef(;kTF9=SEF=6jpC#FDLTqCy+~gm*t`w*&bm zG{xXsf2sBc-MO-vQ-iTF9;qmO5VIBjLOymk1n3jz)OM4vs5o{4Qh%-A*tHO)<9Ij- zyU1sy%#kQHdv>Y@XfrCnf<9OJN+Wr3CMSLyixb)QA zewv}eQG|JU(pVd!XTqU`KwX>0z^DO#YlZcD<6*{N^2 z37Xrn6c;Q&6l;>enCItQPm*b!qn$>(TgAL}MuTP3b~wS;%{QPoRzC)Q#&TL4v0dzS zO2eG5RzAcB;N#!qG;@BWwRr_O7eUU=lG) z1|W*-;87NtHjlIZid53^7DvTF6~^~fmnAVcp*-w8g8nOpD0x6A?43j*87)mjdJL<{ z=Y8M4A51A7zdLI}sYjP!mRe8;6TvZ8k^(Ia zzMCJJ0ij3n3FA{H{9VAf`gG$l=(8_lg>nz)oD{~Wl4WWu3*RhTBe9D~ae9hstN~E$ zoMRrC3OIBW+b$V|ITn~g*Tj-RiB1%?P}z0D(XA4BRASdg@TLT^sSP~0?T{Q#DTDeY z-Az!2R7818m>kiSRPsutKX@DI(x(XrggcCdDr*B5e#noakXz~Qeih6@BP+bG<5LDr zi7WKm8wmYEv^0hJ059~%6Gs6MhYQ&1s0KHZqG@W_U=O9zbERwaZ%54{Hks-oj$5!+ z-sOSz$wiINSY>Nd=0*0mkqmUVkCh^q=!fQ?34}5mQ%3>*5TLck?VFQ@(JxeZ)L0VV zwEUvk1*YvAZZvWa22l&<#^B-Aob~$(Qwbfdv)C;+#n&9RG7|h^LQ(BI01`lyTBsk4 z5~s$O#yhM+1-AYzI1GVu=+vUtR|A1%Y2$W%*2_8|A`gri`rRjYQ;EXbBBP&)RmW1n z8d~?;sA+hJZHgF85Mvi<&tf8oLgVF29CVRHV7+;!(&;h&MLvjZSa4gQdC68D7%LJ# znPSSF5N;=tBF3A5WWfU3w;q5+3OF6jArwsZrKw{s?(u+Uw_^024)iO`AMh~E+fPdj z%Q+|qTLYe{1cTi;2Z9!!Je+9`zoI71dI7zJo{|V8KXt59w4b{01ROu>I1GNxQ80KB z8rg5xnJB$@QT`nOYFA7O+}Rku!0RofLwUNZJ| zFniR(F65nw5#IXCDf=gq&dsh^U69Z=7Nijo_x9zvUg8j-=}v3`uxPPI5x2ilKXatZ05NtD zO`D@+rme3TDX&jRd15%>PY#C%B4P&^TaOvM$C^klgomV-E+g)&eKx3eoj0j@8+#jOX~LGh2LG9EAj^ejCZV2 z@<=xt0~>R{vnO?RanTcm2}CjPXfV*TSTY2K{m@Lmd#_+9%0c}XWoGLoFeVbrrR8EW| zMC?LDb+?Byj|3yhw=kaW=3dhD3{NX_M}~qM&y(|a+gcoq>jwdqd?*P&k-5VWzE|o| zHYdj7A93w{^?)FkS7S7vic8(fO8EysQ5XpnsO%$?@y9=q1BTm%S-R&FY6t-hU5Nlp 
zf914qj;7X5fiXSdy^QFh58&^@Zl1t}Ab4SAc9Is=18}3~70A`(*a!4C&w4!5?!7(x z$-VFCFFd8VW{7cgbHBiXc8#$tYA!uj=(_}KIgZ!RVd!~c4QIb~rh&`JNrer?YGAq9j#$TeQj?Ru*RaPcZ>zMn>W z6Y|*RcTGrKPXqV65(EyN7v95(xdL4@7rUqs9G1yaV|nozuAJO&w8gwJL zd`k>Z{yIr*Yo9I_ z;Ey-E^1Fxqr;Dt)F!8`RcYe?y;Iw-eAQl9nrs(r}a{coX9qC`_(Zns)?3axCwB1>Eqa zr3bL>LKw|#G&=K@Q-e~r&)F`{5Xz@pNlN}+leBFQNJ5M~ly$u?tc`62?53i9u`exr z$>AiMS{i|!3(n27*Y1- zt3VQ)7Ksxebj)o;l&d8FY4?0%8c)gFF1rr*wI6Kry&y5^ zBWcX~1hyTGTi1aD$}??DaSog!On+b*_i=IfJL{_qhmgaksuaD?Nh86@Qy;+Ot(V@9 zaX5Bg5=JGTOSGTOX`5FIWAl`}5a$Q($W^?a?#i@6gS6)I__{D7gt{z9AR9f{ym)cm zk(o|Y*t9Coi-=(Dg3=I>U`(V>gs@ymj&{tx&lp^Ax{fOvDPHuda>z+gM?)4o=HD&WvyfIWd2piK4Pacy8=gKEHTAO%>>KJXAo z24*+`L`*OcIHp!Go)KNb>6=x{IyMxLQQxqC3d2y{6v*e&u+${y;w%%UZ~kK|d{H$A zmXOEA_*e`^qm-3Dd-wCO4}19eALjsIjX!f;=sYMOo0E4z@w46EC?Fu8ik!b%9AiUk zV_^$JOWVII$vS1-YE=L^=wA*4V$*hjP_(6~waoMxK41H}C4;`Sn5_s_~`G7%`$;P#>HoHo0vX29BedEk`pZv6Wqs+vc z9rEt5g3!0;MpbrlQpGWWG;C+rwCoN9*j!t8jU04^q>?dIyk2T$>0hgLR_o2pm!JDg zw+i86=+b#b2Dvw~_Xiyy>0^Vq;rB%As7N$U&zu3WSq4txq|pA05YP(ggs_h@#+H&#HsD$un!KH+BZW|1{zV2BnN zh5B82$-W2O-{FfB3?#Y?4Ynp@n>|sWa6Yqbi~Xa@;2RoZt^E{+Hz4m2O|+O6NBpWC zx?x(9l`Usk^|NWP33nM6)*5gh$=hJx;}(|LPCGln|V4b(m)sT}uar2c@PxH-eef-&{s* zdrw(sow}6!v_qY?_6|w9Z@R>M3nYuwd7$)7nIvgTd7ISRwb~a*az?K3`eE8N8>;OT zwtF52S7}Qdcjvoo4k{I{jGJvO>nFMsKf2+6R6dA((BEQDH*YAuHFrmn)^CKo1p~x^ z6%SU`ezd1P=WGUIw=mO`BH75^LjV60jDbOd^zcrGTX4BVUqX<-34y>67g~&k3~XLe zBDK{|9)IZW9b7+4OI$tEj-Ba$Zn2L~Mf<<~P=N+;ko6jw#GqfOb&B5Wy|XAm0s7K? 
zjOYWK5M9C}p2@6e>_V%4zq9b5)zCME9Gg|s`N^hjHa8m9tz7sXQ!<>yqu7ks`49qMw4Kmou} z+fU~v(FcfYB(Mjj#C~Yl03!tsggOgFaL4$&1RPBfG9{uR|MlvNVJj2rk+L@TtL?FX zH4CmFgl=(t|?)pVh+U&oh7oD^pJJfXi4@= zXc&rZ@yuzkxS?=z+!G0WtOBROnSfhUON0&2db_E(cDY&YFeo8)1Bc~ke6`ADxWhiO!Od3uXRok3FHnA*amX|l;Jq#qS|^IApta|w+pyxJc;4|K@QLmGrCY#Gy*W-+wEI+j*aZTd z$j5~JkJB~qJYNwM3@A`QfcxJLyDyzn!+hzSvrX9k6m~uXu{`bY$|73cn z`(&a4Hs~UM+IIarU<&|si^%c*?~Td7vq36>(ANzY_`Mhh?=Ko&{v`eWf0z{2KAFgYfhEXzfAM7RCzH_s!}LS_pPHyk zaPa;w%KT4@wi>{i5^gX-EnufV8L- zc|D*|sQ?&*(cj|np%6fkGGx5}3q}6J{@nzaRn7y}XbxN}r^WkwQi^{*w&f@G{X{MQ zBP>_|Nh?T2{`*k)#5I30&3}T*4-gO&J7Wb$J9{SvLwozb2uDF0;?oKh1O)5zBk`H~ JX8ZTw{|9x$!3qEX From 4a31d7759d6247241aed2711a389b43edffe067f Mon Sep 17 00:00:00 2001 From: John Rampono Date: Wed, 20 Jul 2022 11:08:20 +0800 Subject: [PATCH 004/151] Added ability to add task to current schedule --- .../InsertScheduleInstance_TaskInstance.sql | 25 ++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/solution/FunctionApp/FunctionApp/DataAccess/SqlTemplates/InsertScheduleInstance_TaskInstance.sql b/solution/FunctionApp/FunctionApp/DataAccess/SqlTemplates/InsertScheduleInstance_TaskInstance.sql index 5a593618..6aee03b9 100644 --- a/solution/FunctionApp/FunctionApp/DataAccess/SqlTemplates/InsertScheduleInstance_TaskInstance.sql +++ b/solution/FunctionApp/FunctionApp/DataAccess/SqlTemplates/InsertScheduleInstance_TaskInstance.sql @@ -10,6 +10,7 @@ Begin TRY DECLARE @tmpOutPut table( ScheduleInstanceId bigint, ScheduleMasterId bigint); +/*General Schedule Insert */ INSERT INTO [dbo].[ScheduleInstance] ([schedulemasterid],[scheduleddateutc],[scheduleddatetimeoffset],[activeyn]) OUTPUT INSERTED.ScheduleInstanceId, INSERTED.ScheduleMasterId @@ -17,7 +18,7 @@ INSERT INTO [dbo].[ScheduleInstance] ([schedulemasterid],[scheduleddateutc],[sch SELECT [schedulemasterid],[scheduleddateutc],[scheduleddatetimeoffset],[activeyn] FROM {tmpScheduleInstance} - +/*General Task Insert */ INSERT INTO [dbo].[TaskInstance] 
([executionuid],[taskmasterid],[scheduleinstanceid],[adfpipeline],[taskinstancejson],[lastexecutionstatus],[activeyn]) SELECT [executionuid],tmpTI.[taskmasterid],B.[scheduleinstanceid],[adfpipeline],[taskinstancejson],[lastexecutionstatus],tmpTI.[activeyn] FROM {tmpTaskInstance} tmpTI @@ -25,6 +26,28 @@ INNER JOIN [dbo].[TaskMaster] TM on TM.TaskMasterId = tmpTI.TaskMasterId INNER JOIN @tmpOutPut B ON B.ScheduleMasterId = TM.ScheduleMasterId +WHERE TM.InsertIntoCurrentSchedule = 0 + +/*Insert Into Current Schedule Tasks*/ +INSERT INTO [dbo].[TaskInstance] ([executionuid],[taskmasterid],[scheduleinstanceid],[adfpipeline],[taskinstancejson],[lastexecutionstatus],[activeyn]) +SELECT [executionuid],tmpTI.[taskmasterid],B.[scheduleinstanceid],[adfpipeline],[taskinstancejson],[lastexecutionstatus],tmpTI.[activeyn] +FROM {tmpTaskInstance} tmpTI +INNER JOIN [dbo].[TaskMaster] TM +on TM.TaskMasterId = tmpTI.TaskMasterId +INNER JOIN +( + Select a.ScheduleMasterId, max(a.scheduleinstanceid) scheduleinstanceid + from [dbo].[ScheduleInstance] a + group by a.ScheduleMasterId +) B +ON B.ScheduleMasterId = TM.ScheduleMasterId +WHERE TM.InsertIntoCurrentSchedule = 1 + +/*Flip Flag on Insert into Current Schedule Tasks*/ +Update TaskMaster +Set InsertIntoCurrentSchedule = 0 +From TaskMaster +where InsertIntoCurrentSchedule = 1 END TRY From e7027e7fcf2b10b6beba676873f229cfe391769f Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 12:02:46 +0800 Subject: [PATCH 005/151] Updating CICD --- solution/DeploymentV2/Sample.env | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 solution/DeploymentV2/Sample.env diff --git a/solution/DeploymentV2/Sample.env b/solution/DeploymentV2/Sample.env new file mode 100644 index 00000000..dc17a9bc --- /dev/null +++ b/solution/DeploymentV2/Sample.env @@ -0,0 +1,23 @@ +ARM_KEYVAULT_NAME="Name of the Azure KeyVault that is used to hold sensitive information used by the deployment. 
Used in terragrunt_install step to open access to keyvault firewall." +ARM_SYNAPSE_WORKSPACE_NAME="Name of the Synapse Workspace that will be deployed. Used in terragrunt_install step to open access to Synapse firewall." +ARM_DATALAKE_NAME="Storage Account Name of the Data Lake Storage Account that will be deployed. Used in terragrunt_install step to open access to DataLake firewall." +ARM_CLIENT_ID=ARM_CLIENT_ID +ARM_CLIENT_SECRET=ARM_CLIENT_SECRET +ARM_SUBSCRIPTION_ID=ARM_SUBSCRIPTION_ID +ARM_TENANT_ID=ARM_TENANT_ID +ARM_DOMAIN=ARM_DOMAIN +ARM_SUBSCRIPTION_ID=ARM_SUBSCRIPTION_ID +ARM_TENANT_ID=ARM_TENANT_ID +ENVIRONMENT_TAG="Name of the resource group into which the analytics landing zone will be deployed" +ARM_RESOURCE_GROUP_NAME="Name of the resource group into which the analytics landing zone will be deployed" +ARM_STORAGE_NAME=ARM_STORAGE_NAME +ARM_JUMPHOST_PASSWORD="Jumphost Password used for Bastion Jumphost" +ARM_SYNAPSE_PASSWORD="Synapse Password used SQL Auth Access to Synapse" +WEB_APP_ADMIN_SECURITY_GROUP="Name of the security group whos memebers will be given admin access to framework web front end" +GIT_REPOSITORY_NAME="Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file." +GIT_PAT="Personal access token of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file." +GIT_USER_NAME="Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file." +GIT_EMAIL_ADDRESS="Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file." +GIT_SYNAPSE_REPOSITORY_BRANCH_NAME="Git Branch Name associated with the branch that will be used to publish Synapse artefacts. 
Only used if synapse_git_toggle_integration is true in the hcl file." +GIT_ADF_REPOSITORY_BRANCH_NAME="Git Branch Name associated with the branch that will be used to publish ADF artefacts. Only used if adf_git_toggle_integration is true in the hcl file." +ARM_PAL_PARTNER_ID="ID of Implementation Partner for PAL purposes" \ No newline at end of file From 35ea9964a24839718999cba8e89b21ff3658c04f Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 12:08:35 +0800 Subject: [PATCH 006/151] Updating CICD --- .github/workflows/continuous-delivery.yml | 109 ++-------------------- 1 file changed, 10 insertions(+), 99 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 1af1f77b..78aa4ee7 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,14 +3,14 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: $default-branch + branches: feature-1.0.4 jobs: deploy-to-nonprod: name: Deploy to NonProd Environment concurrency: terraform env: - environmentName: staging + environmentName: development gitDeploy : true skipTerraformDeployment: false specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 @@ -19,6 +19,7 @@ jobs: datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} # Required for Terraform ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} @@ -46,7 +47,7 @@ jobs: #TF_LOG : TRACE environment: - name: NonProd + name: default runs-on: ubuntu-latest steps: @@ -68,109 +69,19 @@ jobs: continue-on-error: true run: | az extension add --name managementpartner - az managementpartner update --partner-id 6372669 || az managementpartner create --partner-id 6372669 + az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID 
}} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} - name: Terragrunt Install id: terragrunt_install working-directory: ./solution/DeploymentV2/terraform run: | - brew install terragrunt - az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 - az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} - az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} + brew install terragrunt - - name: Install Jsonnet - id: jsonnet-install - working-directory: ./solution/DeploymentV2/ - env: - TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} - run: | - wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb - sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb - - - name: Deploy Solution - id: solution-deployment - working-directory: ./solution/DeploymentV2/ - shell: pwsh - env: - TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} - run: | - git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 - - #PROD ENVIRONMENT - deploy-to-prod: - name: Deploy to Prod Environment - concurrency: terraform - needs: [deploy-to-nonprod] - env: - environmentName: production - gitDeploy : true - skipTerraformDeployment: false - specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 - # Required for updating firewall for runner - keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} - synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} - datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} - # Required for Terraform - ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} - ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} - 
ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} - ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} - # Customizing Terraform vars - TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} - TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} - TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} - TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} - TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} - TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} - TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} - TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} - TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} - # GIT Integration set up - TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} - TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} - TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} - TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} - TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} - TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} - TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} - TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} - TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - - - #TF_LOG : TRACE - environment: - name: Prod - runs-on: ubuntu-latest - steps: - - - name: Checkout - uses: actions/checkout@v3.0.0 - - - name: Get public IP - id: ip - uses: haythem/public-ip@v1.2 - - - name: Login via Az module - uses: azure/login@v1 - with: - creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' - enable-AzPSSession: true - - - name: Set PAL - id: set_pal + - name: Open Firewalls + id: open firewalls continue-on-error: true - run: | - az 
extension add --name managementpartner - az managementpartner update --partner-id 6372669 || az managementpartner create --partner-id 6372669 - - - name: Terragrunt Install - id: terragrunt_install working-directory: ./solution/DeploymentV2/terraform - run: | - brew install terragrunt + run: | az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} @@ -192,4 +103,4 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 + ./Deploy.ps1 \ No newline at end of file From b3cb693f2372796748748f0c16ddc85e9fbef626 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 12:19:50 +0800 Subject: [PATCH 007/151] Updating CICD --- .github/workflows/continuous-delivery.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 78aa4ee7..56dd6cd0 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,7 +3,7 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: feature-1.0.4 + branches: [feature-1.0.4] jobs: deploy-to-nonprod: @@ -47,7 +47,7 @@ jobs: #TF_LOG : TRACE environment: - name: default + name: development runs-on: ubuntu-latest steps: From e1a42309e6dd96815095e32bba507c40154aaeec Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 12:22:04 +0800 Subject: [PATCH 008/151] Update continuous-delivery.yml 
--- .github/workflows/continuous-delivery.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 56dd6cd0..f0b9cb5b 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -6,7 +6,9 @@ on: branches: [feature-1.0.4] jobs: - deploy-to-nonprod: + + deploy: + name: Deploy to NonProd Environment concurrency: terraform env: @@ -103,4 +105,4 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 \ No newline at end of file + ./Deploy.ps1 From 0321dcd7e8933ebc12a848dfc858f0b1f31f8be8 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 12:26:56 +0800 Subject: [PATCH 009/151] Update continuous-delivery.yml --- .github/workflows/continuous-delivery.yml | 196 +++++++++++----------- 1 file changed, 97 insertions(+), 99 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index f0b9cb5b..bd3efe1a 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -5,104 +5,102 @@ on: push: branches: [feature-1.0.4] -jobs: - - deploy: - - name: Deploy to NonProd Environment - concurrency: terraform +jobs: + deploy: + name: Deploy to NonProd Environment + concurrency: terraform + env: + environmentName: development + gitDeploy : true + skipTerraformDeployment: false + specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 + keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} + synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} + datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} + # Required for Terraform + ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} + ARM_TENANT_ID: 
${{ secrets.ARM_TENANT_ID }} + # Customizing Terraform vars + TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} + TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} + TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} + TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} + TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} + TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} + TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} + TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} + TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} + # GIT Integration set up + TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} + TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} + TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + + #TF_LOG : TRACE + environment: + name: development + runs-on: ubuntu-latest + steps: + + - name: Checkout + uses: actions/checkout@v3.0.0 + + - name: Get public IP + id: ip + uses: haythem/public-ip@v1.2 + + - name: Login via Az module + uses: azure/login@v1 + with: + creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' + enable-AzPSSession: true + + - name: Set PAL + id: set_pal + continue-on-error: true + run: | + az extension add --name managementpartner + az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID 
}} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} + + - name: Terragrunt Install + id: terragrunt_install + working-directory: ./solution/DeploymentV2/terraform + run: | + brew install terragrunt + + - name: Open Firewalls + id: open firewalls + continue-on-error: true + working-directory: ./solution/DeploymentV2/terraform + run: | + az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 + az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} + az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} + + - name: Install Jsonnet + id: jsonnet-install + working-directory: ./solution/DeploymentV2/ env: - environmentName: development - gitDeploy : true - skipTerraformDeployment: false - specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 - keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} - synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} - datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} - # Required for Terraform - ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} - ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} - ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} - ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} - ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} - # Customizing Terraform vars - TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} - TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} - TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} - TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} - TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} - 
TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} - TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} - TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} - TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} - # GIT Integration set up - TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} - TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} - TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} - TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} - TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} - TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} - TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} - TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} - TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + run: | + wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb + sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb - #TF_LOG : TRACE - environment: - name: development - runs-on: ubuntu-latest - steps: - - - name: Checkout - uses: actions/checkout@v3.0.0 - - - name: Get public IP - id: ip - uses: haythem/public-ip@v1.2 - - - name: Login via Az module - uses: azure/login@v1 - with: - creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' - enable-AzPSSession: true - - - name: Set PAL - id: set_pal - continue-on-error: true - run: | - az extension add --name managementpartner - az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} - - - name: Terragrunt Install - id: terragrunt_install 
- working-directory: ./solution/DeploymentV2/terraform - run: | - brew install terragrunt - - - name: Open Firewalls - id: open firewalls - continue-on-error: true - working-directory: ./solution/DeploymentV2/terraform - run: | - az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 - az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} - az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} - - - name: Install Jsonnet - id: jsonnet-install - working-directory: ./solution/DeploymentV2/ - env: - TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} - run: | - wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb - sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb - - - name: Deploy Solution - id: solution-deployment - working-directory: ./solution/DeploymentV2/ - shell: pwsh - env: - TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} - run: | - git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 + - name: Deploy Solution + id: solution-deployment + working-directory: ./solution/DeploymentV2/ + shell: pwsh + env: + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + run: | + git update-index --chmod=+x ./Deploy.ps1 + ./Deploy.ps1 From b1a5f2eeed22e6da160d0f00830f7d3c8fbf0260 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 12:29:43 +0800 Subject: [PATCH 010/151] Update continuous-delivery.yml --- .github/workflows/continuous-delivery.yml | 94 ++++++++++++++++++++++- 1 file changed, 93 insertions(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml 
b/.github/workflows/continuous-delivery.yml index bd3efe1a..c7943a20 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -6,7 +6,7 @@ on: branches: [feature-1.0.4] jobs: - deploy: + deploy-to-nonprod: name: Deploy to NonProd Environment concurrency: terraform env: @@ -104,3 +104,95 @@ jobs: run: | git update-index --chmod=+x ./Deploy.ps1 ./Deploy.ps1 +deploy-to-prod: + name: Deploy to Prod Environment + concurrency: terraform + needs: [deploy-to-nonprod] + env: + environmentName: production + gitDeploy : true + skipTerraformDeployment: false + specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 + # Required for updating firewall for runner + keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} + synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} + datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} + # Required for Terraform + ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} + ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} + # Customizing Terraform vars + TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} + TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} + TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} + TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} + TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} + TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} + TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} + TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} + TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} + # GIT Integration set up + TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} + TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_synapse_git_user_name : 
${{secrets.GIT_USER_NAME}} + TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} + TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + + + #TF_LOG : TRACE + environment: + name: Prod + runs-on: ubuntu-latest + steps: + + - name: Checkout + uses: actions/checkout@v3.0.0 + + - name: Get public IP + id: ip + uses: haythem/public-ip@v1.2 + + - name: Login via Az module + uses: azure/login@v1 + with: + creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' + enable-AzPSSession: true + + - name: Set PAL + id: set_pal + continue-on-error: true + run: | + az extension add --name managementpartner + az managementpartner update --partner-id 6372669 || az managementpartner create --partner-id 6372669 + - name: Terragrunt Install + id: terragrunt_install + working-directory: ./solution/DeploymentV2/terraform + run: | + brew install terragrunt + az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 + az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} + az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} + - name: Install Jsonnet + id: jsonnet-install + working-directory: ./solution/DeploymentV2/ + env: + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + 
run: | + wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb + sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb + + - name: Deploy Solution + id: solution-deployment + working-directory: ./solution/DeploymentV2/ + shell: pwsh + env: + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + run: | + git update-index --chmod=+x ./Deploy.ps1 + ./Deploy.ps1 From fc40ee03c67053dbb41a958c8bdb647ee7cc8fef Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 12:31:04 +0800 Subject: [PATCH 011/151] Update continuous-delivery.yml --- .github/workflows/continuous-delivery.yml | 195 +++++++++++----------- 1 file changed, 96 insertions(+), 99 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index c7943a20..1af1f77b 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,108 +3,103 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: [feature-1.0.4] + branches: $default-branch -jobs: - deploy-to-nonprod: - name: Deploy to NonProd Environment - concurrency: terraform - env: - environmentName: development - gitDeploy : true - skipTerraformDeployment: false - specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 - keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} - synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} - datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} - # Required for Terraform - ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} - ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} - ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} - ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} - ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} - # Customizing Terraform vars - TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} - TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} - TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} - TF_VAR_environment_tag : ${{ 
secrets.ENVIRONMENT_TAG }} - TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} - TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} - TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} - TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} - TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} - # GIT Integration set up - TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} - TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} - TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} - TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} - TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} - TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} - TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} - TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} - TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - - #TF_LOG : TRACE - environment: - name: development - runs-on: ubuntu-latest - steps: - - - name: Checkout - uses: actions/checkout@v3.0.0 - - - name: Get public IP - id: ip - uses: haythem/public-ip@v1.2 - - - name: Login via Az module - uses: azure/login@v1 - with: - creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' - enable-AzPSSession: true - - - name: Set PAL - id: set_pal - continue-on-error: true - run: | - az extension add --name managementpartner - az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} - - - name: Terragrunt Install - id: terragrunt_install - working-directory: ./solution/DeploymentV2/terraform - run: | - brew install terragrunt - - - name: Open 
Firewalls - id: open firewalls - continue-on-error: true - working-directory: ./solution/DeploymentV2/terraform - run: | - az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 - az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} - az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} - - - name: Install Jsonnet - id: jsonnet-install - working-directory: ./solution/DeploymentV2/ +jobs: + deploy-to-nonprod: + name: Deploy to NonProd Environment + concurrency: terraform env: - TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} - run: | - wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb - sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb + environmentName: staging + gitDeploy : true + skipTerraformDeployment: false + specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 + keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} + synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} + datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} + # Required for Terraform + ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} + ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} + # Customizing Terraform vars + TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} + TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} + TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} + TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} + TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} + TF_VAR_state_storage_account_name 
: ${{ secrets.ARM_STORAGE_NAME }} + TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} + TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} + TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} + # GIT Integration set up + TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} + TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} + TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - - name: Deploy Solution - id: solution-deployment - working-directory: ./solution/DeploymentV2/ - shell: pwsh - env: - TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} - run: | - git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 -deploy-to-prod: + #TF_LOG : TRACE + environment: + name: NonProd + runs-on: ubuntu-latest + steps: + + - name: Checkout + uses: actions/checkout@v3.0.0 + + - name: Get public IP + id: ip + uses: haythem/public-ip@v1.2 + + - name: Login via Az module + uses: azure/login@v1 + with: + creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' + enable-AzPSSession: true + + - name: Set PAL + id: set_pal + continue-on-error: true + run: | + az extension add --name managementpartner + az managementpartner update --partner-id 6372669 || az managementpartner create --partner-id 6372669 + + - name: Terragrunt Install + id: terragrunt_install + working-directory: 
./solution/DeploymentV2/terraform + run: | + brew install terragrunt + az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 + az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} + az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} + + - name: Install Jsonnet + id: jsonnet-install + working-directory: ./solution/DeploymentV2/ + env: + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + run: | + wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb + sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb + + - name: Deploy Solution + id: solution-deployment + working-directory: ./solution/DeploymentV2/ + shell: pwsh + env: + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + run: | + git update-index --chmod=+x ./Deploy.ps1 + ./Deploy.ps1 + + #PROD ENVIRONMENT + deploy-to-prod: name: Deploy to Prod Environment concurrency: terraform needs: [deploy-to-nonprod] @@ -170,6 +165,7 @@ deploy-to-prod: run: | az extension add --name managementpartner az managementpartner update --partner-id 6372669 || az managementpartner create --partner-id 6372669 + - name: Terragrunt Install id: terragrunt_install working-directory: ./solution/DeploymentV2/terraform @@ -178,6 +174,7 @@ deploy-to-prod: az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ 
steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} + - name: Install Jsonnet id: jsonnet-install working-directory: ./solution/DeploymentV2/ From 8d15700cc85f4dbdda8490e3ae578c70ee4546fc Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 12:36:28 +0800 Subject: [PATCH 012/151] Update continuous-delivery.yml --- .github/workflows/continuous-delivery.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 1af1f77b..a8cbc208 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -10,7 +10,7 @@ jobs: name: Deploy to NonProd Environment concurrency: terraform env: - environmentName: staging + environmentName: development gitDeploy : true skipTerraformDeployment: false specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 @@ -19,6 +19,7 @@ jobs: datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} # Required for Terraform ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} @@ -46,7 +47,7 @@ jobs: #TF_LOG : TRACE environment: - name: NonProd + name: development runs-on: ubuntu-latest steps: @@ -68,7 +69,8 @@ jobs: continue-on-error: true run: | az extension add --name managementpartner - az managementpartner update --partner-id 6372669 || az managementpartner create --partner-id 6372669 + az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} + - name: Terragrunt Install id: terragrunt_install From 
8dfb55aec29b42fa49690fc4d435b16aa208c1ae Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 12:52:02 +0800 Subject: [PATCH 013/151] Update continuous-delivery.yml --- .github/workflows/continuous-delivery.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index a8cbc208..2db33d6a 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,7 +3,7 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: $default-branch + branches: feature-1.0.4 jobs: deploy-to-nonprod: From 6bde5f2b0765f8e04a0e99ca110b0aaa0a042164 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 12:56:37 +0800 Subject: [PATCH 014/151] Updating CICD --- .github/workflows/continuous-delivery.yml | 37 +---------------------- 1 file changed, 1 insertion(+), 36 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 2db33d6a..fe4bae38 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -159,39 +159,4 @@ jobs: uses: azure/login@v1 with: creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' - enable-AzPSSession: true - - - name: Set PAL - id: set_pal - continue-on-error: true - run: | - az extension add --name managementpartner - az managementpartner update --partner-id 6372669 || az managementpartner create --partner-id 6372669 - - - name: Terragrunt Install - id: terragrunt_install - working-directory: ./solution/DeploymentV2/terraform - run: | - brew install terragrunt - az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 - az synapse workspace firewall-rule 
create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} - az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} - - - name: Install Jsonnet - id: jsonnet-install - working-directory: ./solution/DeploymentV2/ - env: - TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} - run: | - wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb - sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb - - - name: Deploy Solution - id: solution-deployment - working-directory: ./solution/DeploymentV2/ - shell: pwsh - env: - TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} - run: | - git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 + enable-AzPSSession: true From 0985a165548b811bfcb3318a38412e9837e90694 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 15:57:16 +0800 Subject: [PATCH 015/151] Ugrading Azure Storage Lib. 
--- solution/FunctionApp/FunctionApp/FunctionApp.csproj | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/solution/FunctionApp/FunctionApp/FunctionApp.csproj b/solution/FunctionApp/FunctionApp/FunctionApp.csproj index 32e631c3..5de65e0d 100644 --- a/solution/FunctionApp/FunctionApp/FunctionApp.csproj +++ b/solution/FunctionApp/FunctionApp/FunctionApp.csproj @@ -32,11 +32,11 @@ - + - + - + From 95bb7c32da505662e8ae70b72bb62bd8f4e2fd17 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 15:58:06 +0800 Subject: [PATCH 016/151] Updating CICD --- solution/FunctionApp/FunctionApp/FunctionApp.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solution/FunctionApp/FunctionApp/FunctionApp.csproj b/solution/FunctionApp/FunctionApp/FunctionApp.csproj index 32e631c3..7a5a0d1d 100644 --- a/solution/FunctionApp/FunctionApp/FunctionApp.csproj +++ b/solution/FunctionApp/FunctionApp/FunctionApp.csproj @@ -34,7 +34,7 @@ - + From 4f3607ea0d467bc8b39ee56a1e782dba8dce255c Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 16:05:24 +0800 Subject: [PATCH 017/151] CICD Changes --- .github/workflows/continuous-delivery.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index fe4bae38..ef7fd4e1 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -98,7 +98,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 + #PROD ENVIRONMENT deploy-to-prod: From e9711ecece2071b19d1c7b8833e2090aa8874bac Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 16:16:08 +0800 Subject: [PATCH 018/151] CICD Changes --- .github/workflows/continuous-delivery.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 
ef7fd4e1..fa77b864 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -77,6 +77,11 @@ jobs: working-directory: ./solution/DeploymentV2/terraform run: | brew install terragrunt + + - name: Open Firewalls for Agent + id: open_firewalls + working-directory: ./solution/DeploymentV2/terraform + run: | az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} From d7921c014e225ee9d5e510135aaa49d01664cfa9 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 16:39:51 +0800 Subject: [PATCH 019/151] CICD Changes --- .github/workflows/continuous-delivery.yml | 19 ++++------ solution/DeploymentV2/Deploy.ps1 | 44 ++++++++++++++++++----- 2 files changed, 42 insertions(+), 21 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index fa77b864..a0748d71 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -6,8 +6,8 @@ on: branches: feature-1.0.4 jobs: - deploy-to-nonprod: - name: Deploy to NonProd Environment + deploy-to-env-one: + name: Deploy to Environment1 concurrency: terraform env: environmentName: development @@ -106,10 +106,10 @@ jobs: #PROD ENVIRONMENT - deploy-to-prod: - name: Deploy to Prod Environment + deploy-to-env-two: + name: Deploy to Environment Two concurrency: terraform - needs: [deploy-to-nonprod] + needs: [deploy-to-env-one] env: environmentName: production gitDeploy : true 
@@ -146,8 +146,8 @@ jobs: TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - - #TF_LOG : TRACE + #PROD ENVIRONMENT + #TF_LOG : TRACE environment: name: Prod runs-on: ubuntu-latest @@ -160,8 +160,3 @@ jobs: id: ip uses: haythem/public-ip@v1.2 - - name: Login via Az module - uses: azure/login@v1 - with: - creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' - enable-AzPSSession: true diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index a22731ac..e5bbdda9 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -21,6 +21,32 @@ # You can run this script multiple times if needed. #---------------------------------------------------------------------------------------------------------------- +if ($null -eq $Env:GITHUB_ENV) + { + [Environment]::SetEnvironmentVariable("GITHUB_ENV",".\bin\GitEnv.txt") + $FileNameOnly = Split-Path $env:GITHUB_ENV -leaf + $PathOnly = Split-Path $env:GITHUB_ENV + if ((Test-Path $env:GITHUB_ENV)) + { + # Remove-Item -Path $env:GITHUB_ENV + } + else + { + + New-Item -Path $PathOnly -Name $FileNameOnly -type "file" -value "" + } + +} + +function PersistEnvVariable($Name, $Value) +{ + Write-Debug "Writing $Name to env file" + echo "$Name=$Value" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + #Also Push Variables to the Session Env Variables for local testing + [Environment]::SetEnvironmentVariable($Name, "$Value") + +} + #------------------------------------------------------------------------------------------------------------ @@ -41,7 +67,7 @@ $AddSpecificUserAsWebAppAdmin = $env:AdsGf_AddSpecificUserAsWebAppAdmin #------------------------------------------------------------------------------------------------------------ # Main Terraform 
#------------------------------------------------------------------------------------------------------------ -Invoke-Expression ./Deploy_1_Infra0.ps1 +#Invoke-Expression ./Deploy_1_Infra0.ps1 #------------------------------------------------------------------------------------------------------------ @@ -91,14 +117,14 @@ Invoke-Expression ./Deploy_1_Infra0.ps1 #------------------------------------------------------------------------------------------------------------ # Run Each SubModule #------------------------------------------------------------------------------------------------------------ -Invoke-Expression ./Deploy_3_Infra1.ps1 -Invoke-Expression ./Deploy_4_PrivateLinks.ps1 -Invoke-Expression ./Deploy_5_WebApp.ps1 -Invoke-Expression ./Deploy_6_FuncApp.ps1 -Invoke-Expression ./Deploy_7_MetadataDB.ps1 -Invoke-Expression ./Deploy_8_SQLLogins.ps1 -Invoke-Expression ./Deploy_9_DataFactory.ps1 -Invoke-Expression ./Deploy_10_SampleFiles.ps1 +#Invoke-Expression ./Deploy_3_Infra1.ps1 +#Invoke-Expression ./Deploy_4_PrivateLinks.ps1 +#Invoke-Expression ./Deploy_5_WebApp.ps1 +#Invoke-Expression ./Deploy_6_FuncApp.ps1 +#Invoke-Expression ./Deploy_7_MetadataDB.ps1 +#Invoke-Expression ./Deploy_8_SQLLogins.ps1 +#Invoke-Expression ./Deploy_9_DataFactory.ps1 +#Invoke-Expression ./Deploy_10_SampleFiles.ps1 #---------------------------------------------------------------------------------------------------------------- # Set up Purview From bf382c0388710e1dacb044fc005ed9825cecc14c Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 16:49:47 +0800 Subject: [PATCH 020/151] CICD Changes --- .github/workflows/continuous-delivery.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index a0748d71..bf5a713f 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -7,13 +7,13 @@ on: jobs: 
deploy-to-env-one: - name: Deploy to Environment1 + name: Deploy to Environment One concurrency: terraform env: environmentName: development gitDeploy : true skipTerraformDeployment: false - specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 + specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} @@ -102,7 +102,8 @@ jobs: env: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | - git update-index --chmod=+x ./Deploy.ps1 + git update-index --chmod=+x ./Deploy.ps1 + ./Deploy.ps1 #PROD ENVIRONMENT @@ -114,13 +115,13 @@ jobs: environmentName: production gitDeploy : true skipTerraformDeployment: false - specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 - # Required for updating firewall for runner + specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} # Required for Terraform ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} From d4f1a2e83a13246ce4b464c5dcb425929ef97a5d Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 16:57:00 +0800 Subject: [PATCH 021/151] CICD Changes --- solution/DeploymentV2/Deploy.ps1 | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index e5bbdda9..a3a79d3e 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -75,8 +75,9 @@ $AddSpecificUserAsWebAppAdmin = $env:AdsGf_AddSpecificUserAsWebAppAdmin 
#------------------------------------------------------------------------------------------------------------ Set-Location "./terraform" Write-Host "Reading Terraform Outputs" + terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure Import-Module .\..\GatherOutputsFromTerraform.psm1 -force - $tout = GatherOutputsFromTerraform + $tout = GatherOutputsFromTerraform $outputs = terragrunt output -json --terragrunt-config ./vars/$environmentName/terragrunt.hcl | ConvertFrom-Json $subscription_id =$outputs.subscription_id.value $resource_group_name =$outputs.resource_group_name.value From d5e956cdd6703e51653a789734b2cb6aaa1321dc Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 17:12:47 +0800 Subject: [PATCH 022/151] CICD Changes --- solution/DeploymentV2/Deploy.ps1 | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index a3a79d3e..e9779d5e 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -52,6 +52,7 @@ function PersistEnvVariable($Name, $Value) #------------------------------------------------------------------------------------------------------------ # Preparation #Mandatory #------------------------------------------------------------------------------------------------------------ + $deploymentFolderPath = (Get-Location).Path $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') @@ -64,10 +65,19 @@ $env:TF_VAR_ip_address = $myIp $AddSpecificUserAsWebAppAdmin = $env:AdsGf_AddSpecificUserAsWebAppAdmin +PersistEnvVariable $Name = "deploymentFolderPath" $Value = (Get-Location).Path +PersistEnvVariable $Name = "gitDeploy" $Value = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +PersistEnvVariable $Name = 
"skipTerraformDeployment" $Value = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') +PersistEnvVariable $Name = "environmentName" $Value = [System.Environment]::GetEnvironmentVariable('environmentName') +PersistEnvVariable $Name = "myIp" $Value = (Invoke-WebRequest ifconfig.me/ip).Content +PersistEnvVariable $Name = "TF_VAR_ip_address" $Value = (Invoke-WebRequest ifconfig.me/ip).Content +PersistEnvVariable $Name = "AddSpecificUserAsWebAppAdmin" $Value = [System.Environment]::GetEnvironmentVariable('AddSpecificUserAsWebAppAdmin') + + #------------------------------------------------------------------------------------------------------------ # Main Terraform #------------------------------------------------------------------------------------------------------------ -#Invoke-Expression ./Deploy_1_Infra0.ps1 +Invoke-Expression ./Deploy_1_Infra0.ps1 #------------------------------------------------------------------------------------------------------------ @@ -75,7 +85,8 @@ $AddSpecificUserAsWebAppAdmin = $env:AdsGf_AddSpecificUserAsWebAppAdmin #------------------------------------------------------------------------------------------------------------ Set-Location "./terraform" Write-Host "Reading Terraform Outputs" - terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure + #Run Init Just in Case we skipped the Infra Section + $init = terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure Import-Module .\..\GatherOutputsFromTerraform.psm1 -force $tout = GatherOutputsFromTerraform $outputs = terragrunt output -json --terragrunt-config ./vars/$environmentName/terragrunt.hcl | ConvertFrom-Json From 77ae4b18f8d6fb615a38497aa5261533208478f0 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 17:17:14 +0800 Subject: [PATCH 023/151] CICD Changes --- solution/DeploymentV2/Deploy.ps1 | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git 
a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index e9779d5e..49ae83ce 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -65,13 +65,13 @@ $env:TF_VAR_ip_address = $myIp $AddSpecificUserAsWebAppAdmin = $env:AdsGf_AddSpecificUserAsWebAppAdmin -PersistEnvVariable $Name = "deploymentFolderPath" $Value = (Get-Location).Path -PersistEnvVariable $Name = "gitDeploy" $Value = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') -PersistEnvVariable $Name = "skipTerraformDeployment" $Value = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') -PersistEnvVariable $Name = "environmentName" $Value = [System.Environment]::GetEnvironmentVariable('environmentName') -PersistEnvVariable $Name = "myIp" $Value = (Invoke-WebRequest ifconfig.me/ip).Content -PersistEnvVariable $Name = "TF_VAR_ip_address" $Value = (Invoke-WebRequest ifconfig.me/ip).Content -PersistEnvVariable $Name = "AddSpecificUserAsWebAppAdmin" $Value = [System.Environment]::GetEnvironmentVariable('AddSpecificUserAsWebAppAdmin') +#PersistEnvVariable $Name = "deploymentFolderPath" $Value = (Get-Location).Path +#PersistEnvVariable $Name = "gitDeploy" $Value = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +#PersistEnvVariable $Name = "skipTerraformDeployment" $Value = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') +#PersistEnvVariable $Name = "environmentName" $Value = [System.Environment]::GetEnvironmentVariable('environmentName') +#PersistEnvVariable $Name = "myIp" $Value = (Invoke-WebRequest ifconfig.me/ip).Content +#PersistEnvVariable $Name = "TF_VAR_ip_address" $Value = (Invoke-WebRequest ifconfig.me/ip).Content +#PersistEnvVariable $Name = "AddSpecificUserAsWebAppAdmin" $Value = [System.Environment]::GetEnvironmentVariable('AdsGf_AddSpecificUserAsWebAppAdmin') 
#------------------------------------------------------------------------------------------------------------ From b8a97cbd88595e02e1205c13524da6c279de34a7 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 17:23:31 +0800 Subject: [PATCH 024/151] CICD Testing --- .github/workflows/continuous-delivery.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index bf5a713f..2c9973f8 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -10,7 +10,7 @@ jobs: name: Deploy to Environment One concurrency: terraform env: - environmentName: development + environmentName: local gitDeploy : true skipTerraformDeployment: false specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} From e4dd7f9506f3356ab43e158e8fc7577c5c2e75a5 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 18:45:16 +0800 Subject: [PATCH 025/151] CICD Testing --- .github/workflows/continuous-delivery.yml | 2 +- solution/DeploymentV2/terraform/readme.md | 12 +++++ .../terraform/vars/local/terragrunt.hcl | 6 +-- .../terraform/vars/staging/terragrunt.hcl | 49 ++++++++----------- 4 files changed, 37 insertions(+), 32 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 2c9973f8..e69a5495 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -10,7 +10,7 @@ jobs: name: Deploy to Environment One concurrency: terraform env: - environmentName: local + environmentName: local #This determines the location of the .hcl file that will be used gitDeploy : true skipTerraformDeployment: false specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} diff --git a/solution/DeploymentV2/terraform/readme.md b/solution/DeploymentV2/terraform/readme.md index d0b47c70..55bf47bb 100644 --- 
a/solution/DeploymentV2/terraform/readme.md +++ b/solution/DeploymentV2/terraform/readme.md @@ -70,6 +70,18 @@ cd infrastructure terragrunt init --terragrunt-config vars/development/terragrunt.hcl ``` +## If you need to import existing resources + + +1. Grant you service principal rights to the resources. + +eg. az ad app owner add --id db2c4f38-1566-41af-a1d4-495cd59097cc --owner-object-id 4c732d19-4076-4a76-87f3-6fbfd77f007d + +2. Then import resources into state + +terraform import azuread_application.web_reg[0] 497fb46f-3d88-4445-b9e8-7065970e3b40 +terraform import azuread_application.function_app_reg[0] db2c4f38-1566-41af-a1d4-495cd59097cc + # Required Azure resource providers Microsoft.Storage diff --git a/solution/DeploymentV2/terraform/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform/vars/local/terragrunt.hcl index 77590550..2e42becb 100644 --- a/solution/DeploymentV2/terraform/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform/vars/local/terragrunt.hcl @@ -4,7 +4,7 @@ inputs = { tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure subscription id to deploy to resource_location = "Australia East" # The location of the resources - resource_group_name = "adt1" # The resource group all resources will be deployed to + resource_group_name = "gft2" # The resource group all resources will be deployed to owner_tag = "Contoso" # Owner tag value for Azure resources environment_tag = "dev" # This is used on Azure tags as well as all resource names ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
@@ -17,15 +17,15 @@ inputs = { deploy_purview = false deploy_synapse = true deploy_metadata_database = true - is_vnet_isolated = true + is_vnet_isolated = false publish_web_app = true publish_function_app = true publish_sample_files = true - publish_sif_database = true publish_metadata_database = true configure_networking = true publish_datafactory_pipelines = true publish_web_app_addcurrentuserasadmin = true deploy_selfhostedsql = false is_onprem_datafactory_ir_registered = false + publish_sif_database = true } diff --git a/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl index 10614289..448ac76e 100644 --- a/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl @@ -7,8 +7,8 @@ remote_state { config = { # You need to update the resource group and storage account here. # You should have created these with the Prepare.ps1 script. - resource_group_name = "lockboxdev02" - storage_account_name = "lockboxdev02state" + resource_group_name = "gft2" + storage_account_name = "gft2state" container_name = "tstate" key = "terraform.tfstate" } @@ -18,40 +18,33 @@ remote_state { # If you are deploying using pipelines, these can be overridden from environment variables # using TF_VAR_variablename inputs = { - prefix = "ark" # All azure resources will be prefixed with this - domain = "arkahna.io" # Used when configuring AAD config for Azure functions - tenant_id = "0fee3d31-b963-4a1c-8f4a-ca367205aa65" # This is the Azure AD tenant ID - subscription_id = "687fe1ae-a520-4f86-b921-a80664c40f9b" # The azure subscription id to deploy to + prefix = "ads" # All azure resources will be prefixed with this + domain = "microsoft.com" # Used when configuring AAD config for Azure functions + tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID + subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure 
subscription id to deploy to resource_location = "Australia East" # The location of the resources - resource_group_name = "lockboxdev02" # The resource group all resources will be deployed to - owner_tag = "Arkahna" # Owner tag value for Azure resources + resource_group_name = "gft2" # The resource group all resources will be deployed to + owner_tag = "Contoso" # Owner tag value for Azure resources environment_tag = "stg" # This is used on Azure tags as well as all resource names - ip_address = "101.179.198.119" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. deploy_web_app = true deploy_function_app = true deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. - deploy_sentinel = false - deploy_purview = false + deploy_app_service_plan = true + deploy_data_factory = true + deploy_sentinel = true + deploy_purview = false deploy_synapse = true deploy_metadata_database = true - is_vnet_isolated = true - publish_web_app = false + is_vnet_isolated = false + publish_web_app = true publish_function_app = true - publish_sample_files = false - publish_metadata_database = false - publish_sql_logins = false - configure_networking = true - publish_datafactory_pipelines = false + publish_sample_files = true + publish_metadata_database = true + configure_networking = false + publish_datafactory_pipelines = true publish_web_app_addcurrentuserasadmin = true - deploy_synapse_sqlpool = false deploy_selfhostedsql = false - is_onprem_datafactory_ir_registered = true - deploy_h2o-ai = false - synapse_git_toggle_integration = true - synapse_git_repository_owner = "h-sha" - synapse_git_repository_name = "testLockbox" - synapse_git_repository_branch_name = "lockboxdev02" - synapse_git_repository_root_folder = "/Synapse" - synapse_git_use_pat = false - 
synapse_git_pat = "" + is_onprem_datafactory_ir_registered = false + publish_sif_database = true } From fc718877280b807ec3481e6141eb1686eaa32092 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 18:52:48 +0800 Subject: [PATCH 026/151] modified: .github/workflows/continuous-delivery.yml --- .github/workflows/continuous-delivery.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index e69a5495..98c45db9 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -10,7 +10,8 @@ jobs: name: Deploy to Environment One concurrency: terraform env: - environmentName: local #This determines the location of the .hcl file that will be used + # This determines the location of the .hcl file that will be used + environmentName: local gitDeploy : true skipTerraformDeployment: false specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} From 1ae0389703d512f478088beb9a97f099499d2961 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 18:55:17 +0800 Subject: [PATCH 027/151] CICDTesting --- .github/workflows/continuous-delivery.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 98c45db9..3dc2ad3f 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -11,7 +11,7 @@ jobs: concurrency: terraform env: # This determines the location of the .hcl file that will be used - environmentName: local + environmentName: staging gitDeploy : true skipTerraformDeployment: false specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} From 3e0555db0d11f525b1eb624cbefcb39b4ae0082b Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 19:09:02 +0800 Subject: [PATCH 028/151] CICD Changes --- .github/workflows/continuous-delivery.yml | 6 
++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 3dc2ad3f..0c44f429 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -52,6 +52,12 @@ jobs: runs-on: ubuntu-latest steps: + - name: PrintInfo + run: | + echo "Deploying to Resource Group: ${{ env.TF_VAR_resource_group_name }} " + echo "echo Hcl file name: ${{ env.environmentName}} " + echo "echo Hcl file name: ${{ env.ARM_CLIENT_ID}} " + - name: Checkout uses: actions/checkout@v3.0.0 From b3253d692d67dcb2fdbe51e9025a175f22fd1fbd Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 19:13:46 +0800 Subject: [PATCH 029/151] CICD --- .github/workflows/continuous-delivery.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 0c44f429..3767699a 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -48,7 +48,7 @@ jobs: #TF_LOG : TRACE environment: - name: development + name: staging runs-on: ubuntu-latest steps: From 86bd7839f77bc44b00ae8b1ca05977341d4aa192 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 19:18:29 +0800 Subject: [PATCH 030/151] CICD --- .github/workflows/continuous-delivery.yml | 4 ++-- solution/DeploymentV2/Deploy.ps1 | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 3767699a..7a1dfa2e 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -11,7 +11,7 @@ jobs: concurrency: terraform env: # This determines the location of the .hcl file that will be used - environmentName: staging + environmentName: local gitDeploy : true skipTerraformDeployment: false specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} 
@@ -48,7 +48,7 @@ jobs: #TF_LOG : TRACE environment: - name: staging + name: development runs-on: ubuntu-latest steps: diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 49ae83ce..bf982ebc 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -77,7 +77,7 @@ $AddSpecificUserAsWebAppAdmin = $env:AdsGf_AddSpecificUserAsWebAppAdmin #------------------------------------------------------------------------------------------------------------ # Main Terraform #------------------------------------------------------------------------------------------------------------ -Invoke-Expression ./Deploy_1_Infra0.ps1 +#Invoke-Expression ./Deploy_1_Infra0.ps1 #------------------------------------------------------------------------------------------------------------ From 86ae7ffe3f606ef3cb96505e99eb66a5ae2af1e0 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 19:29:43 +0800 Subject: [PATCH 031/151] CICD Test --- .github/workflows/continuous-delivery.yml | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 7a1dfa2e..c7c219cc 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -10,8 +10,7 @@ jobs: name: Deploy to Environment One concurrency: terraform env: - # This determines the location of the .hcl file that will be used - environmentName: local + environmentName: local gitDeploy : true skipTerraformDeployment: false specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} @@ -52,12 +51,6 @@ jobs: runs-on: ubuntu-latest steps: - - name: PrintInfo - run: | - echo "Deploying to Resource Group: ${{ env.TF_VAR_resource_group_name }} " - echo "echo Hcl file name: ${{ env.environmentName}} " - echo "echo Hcl file name: ${{ env.ARM_CLIENT_ID}} " - - name: Checkout uses: actions/checkout@v3.0.0 @@ -77,8 +70,6 @@ jobs: run: 
| az extension add --name managementpartner az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} - - - name: Terragrunt Install id: terragrunt_install working-directory: ./solution/DeploymentV2/terraform @@ -92,7 +83,6 @@ jobs: az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} - - name: Install Jsonnet id: jsonnet-install working-directory: ./solution/DeploymentV2/ @@ -166,5 +156,4 @@ jobs: - name: Get public IP id: ip - uses: haythem/public-ip@v1.2 - + uses: haythem/public-ip@v1.2 \ No newline at end of file From b3fe734e136bcb8b42a3aa9e684db97a93e3c0d3 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 19:55:24 +0800 Subject: [PATCH 032/151] CICD --- .github/workflows/continuous-delivery.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index c7c219cc..42860ae9 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -9,8 +9,10 @@ jobs: deploy-to-env-one: name: Deploy to Environment One concurrency: terraform + environment: + name: development env: - environmentName: local + environmentName: staging gitDeploy : true skipTerraformDeployment: false specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} @@ -45,9 +47,7 @@ jobs: TF_VAR_adf_git_user_name : 
${{secrets.GIT_USER_NAME}} TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - #TF_LOG : TRACE - environment: - name: development + #TF_LOG : TRACE runs-on: ubuntu-latest steps: From f13fdf02e10742ab247b189ab270b02f1988bad6 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 20:00:51 +0800 Subject: [PATCH 033/151] CICD --- .github/workflows/continuous-delivery.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 42860ae9..0054d4e7 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -12,7 +12,7 @@ jobs: environment: name: development env: - environmentName: staging + environmentName: development gitDeploy : true skipTerraformDeployment: false specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} From 2d3acf70f510fe03e417ea6ec4aa3c4f2b930e84 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 20:03:36 +0800 Subject: [PATCH 034/151] CICD --- .github/workflows/continuous-delivery.yml | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 0054d4e7..f089a795 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -7,15 +7,13 @@ on: jobs: deploy-to-env-one: - name: Deploy to Environment One + name: Deploy to Environment1 concurrency: terraform - environment: - name: development env: environmentName: development gitDeploy : true skipTerraformDeployment: false - specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} + specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} @@ -47,7 +45,9 @@ jobs: 
TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - #TF_LOG : TRACE + #TF_LOG : TRACE + environment: + name: development runs-on: ubuntu-latest steps: @@ -70,6 +70,8 @@ jobs: run: | az extension add --name managementpartner az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} + + - name: Terragrunt Install id: terragrunt_install working-directory: ./solution/DeploymentV2/terraform @@ -83,6 +85,7 @@ jobs: az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} + - name: Install Jsonnet id: jsonnet-install working-directory: ./solution/DeploymentV2/ @@ -99,8 +102,7 @@ jobs: env: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | - git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 + git update-index --chmod=+x ./Deploy.ps1 #PROD ENVIRONMENT @@ -112,13 +114,13 @@ jobs: environmentName: production gitDeploy : true skipTerraformDeployment: false - specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} + specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 + # Required for updating firewall for runner keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} # Required for Terraform ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} - ARM_PAL_PARTNER_ID: ${{ 
secrets.ARM_PAL_PARTNER_ID }} ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} @@ -156,4 +158,4 @@ jobs: - name: Get public IP id: ip - uses: haythem/public-ip@v1.2 \ No newline at end of file + uses: haythem/public-ip@v1.2 From 25899985015ce7775cef420b66872440919077a1 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 20:23:11 +0800 Subject: [PATCH 035/151] modified: .github/workflows/continuous-delivery.yml modified: solution/DeploymentV2/terraform/readme.md --- .github/workflows/continuous-delivery.yml | 19 +++++++++++++------ solution/DeploymentV2/terraform/readme.md | 2 +- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index f089a795..9cc8860d 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -7,13 +7,14 @@ on: jobs: deploy-to-env-one: - name: Deploy to Environment1 + name: Deploy to Environment One concurrency: terraform env: - environmentName: development + # This determines the location of the .hcl file that will be used + environmentName: staging gitDeploy : true skipTerraformDeployment: false - specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 + specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} @@ -51,6 +52,11 @@ jobs: runs-on: ubuntu-latest steps: + - name: PrintInfo + run: | + echo "Deploying to Resource Group: ${{ env.TF_VAR_resource_group_name }} " + echo "echo Hcl file name: ${{ env.environmentName}} " + - name: Checkout uses: actions/checkout@v3.0.0 @@ -102,7 +108,8 @@ jobs: env: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | - git update-index --chmod=+x ./Deploy.ps1 + git 
update-index --chmod=+x ./Deploy.ps1 + ./Deploy.ps1 #PROD ENVIRONMENT @@ -114,13 +121,13 @@ jobs: environmentName: production gitDeploy : true skipTerraformDeployment: false - specificUserIdForWebAppAdmin: 45672f90-82f9-4108-b389-da7139e938a1 - # Required for updating firewall for runner + specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} # Required for Terraform ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} diff --git a/solution/DeploymentV2/terraform/readme.md b/solution/DeploymentV2/terraform/readme.md index 55bf47bb..e117408b 100644 --- a/solution/DeploymentV2/terraform/readme.md +++ b/solution/DeploymentV2/terraform/readme.md @@ -74,7 +74,7 @@ terragrunt init --terragrunt-config vars/development/terragrunt.hcl 1. Grant you service principal rights to the resources. - +eg. $assignment = az role assignment create --role "Owner" --assignee 4c732d19-4076-4a76-87f3-6fbfd77f007d --resource-group "gft2" eg. az ad app owner add --id db2c4f38-1566-41af-a1d4-495cd59097cc --owner-object-id 4c732d19-4076-4a76-87f3-6fbfd77f007d 2. 
Then import resources into state From f46baa36f2dab654f1dc5b2419a9131012a94cd5 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 21:40:08 +0800 Subject: [PATCH 036/151] CICD --- .github/workflows/continuous-delivery.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 9cc8860d..3935e404 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -86,6 +86,7 @@ jobs: - name: Open Firewalls for Agent id: open_firewalls + continue-on-error: true working-directory: ./solution/DeploymentV2/terraform run: | az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 From d66380805af759750bfdc0d72303a0c807177e3a Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 22 Jul 2022 21:44:04 +0800 Subject: [PATCH 037/151] CICD --- solution/DeploymentV2/Deploy.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index bf982ebc..49ae83ce 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -77,7 +77,7 @@ $AddSpecificUserAsWebAppAdmin = $env:AdsGf_AddSpecificUserAsWebAppAdmin #------------------------------------------------------------------------------------------------------------ # Main Terraform #------------------------------------------------------------------------------------------------------------ -#Invoke-Expression ./Deploy_1_Infra0.ps1 +Invoke-Expression ./Deploy_1_Infra0.ps1 #------------------------------------------------------------------------------------------------------------ From 303fc9cdbebfefa92f45f833957fc6827a2d8a3c Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 23 Jul 2022 07:06:40 +0800 Subject: [PATCH 038/151] Breaking up terraform into layers --- 
solution/DeploymentV2/terraform/readme.md | 6 +- .../DeploymentV2/terraform_custom/vars.tf | 27 ++++ .../DeploymentV2/terraform_prepare/.gitignore | 39 ++++++ .../terraform_prepare/.terraform.lock.hcl | 80 ++++++++++++ .../terraform_prepare/app_service.tf | 74 +++++++++++ .../terraform_prepare/function_app.tf | 39 ++++++ .../DeploymentV2/terraform_prepare/locals.tf | 20 +++ .../DeploymentV2/terraform_prepare/main.tf | 50 ++++++++ .../DeploymentV2/terraform_prepare/vars.tf | 121 ++++++++++++++++++ .../vars/admz/terragrunt.hcl | 30 +++++ .../vars/local/terragrunt.hcl | 32 +++++ .../vars/production/terragrunt.hcl | 33 +++++ .../vars/staging/terragrunt.hcl | 32 +++++ 13 files changed, 582 insertions(+), 1 deletion(-) create mode 100644 solution/DeploymentV2/terraform_prepare/.gitignore create mode 100644 solution/DeploymentV2/terraform_prepare/.terraform.lock.hcl create mode 100644 solution/DeploymentV2/terraform_prepare/app_service.tf create mode 100644 solution/DeploymentV2/terraform_prepare/function_app.tf create mode 100644 solution/DeploymentV2/terraform_prepare/locals.tf create mode 100644 solution/DeploymentV2/terraform_prepare/main.tf create mode 100644 solution/DeploymentV2/terraform_prepare/vars.tf create mode 100644 solution/DeploymentV2/terraform_prepare/vars/admz/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_prepare/vars/local/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_prepare/vars/production/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_prepare/vars/staging/terragrunt.hcl diff --git a/solution/DeploymentV2/terraform/readme.md b/solution/DeploymentV2/terraform/readme.md index e117408b..0f05e929 100644 --- a/solution/DeploymentV2/terraform/readme.md +++ b/solution/DeploymentV2/terraform/readme.md @@ -75,7 +75,11 @@ terragrunt init --terragrunt-config vars/development/terragrunt.hcl 1. Grant you service principal rights to the resources. eg. 
$assignment = az role assignment create --role "Owner" --assignee 4c732d19-4076-4a76-87f3-6fbfd77f007d --resource-group "gft2" -eg. az ad app owner add --id db2c4f38-1566-41af-a1d4-495cd59097cc --owner-object-id 4c732d19-4076-4a76-87f3-6fbfd77f007d + +az ad app owner add --id db2c4f38-1566-41af-a1d4-495cd59097cc --owner-object-id 4c732d19-4076-4a76-87f3-6fbfd77f007d +az ad app owner add --id d2e89752-2e75-48ba-a5a7-cb4bbc7bcfc8 --owner-object-id 4c732d19-4076-4a76-87f3-6fbfd77f007d + + 2. Then import resources into state diff --git a/solution/DeploymentV2/terraform_custom/vars.tf b/solution/DeploymentV2/terraform_custom/vars.tf index 9e3b854a..98e9e54d 100644 --- a/solution/DeploymentV2/terraform_custom/vars.tf +++ b/solution/DeploymentV2/terraform_custom/vars.tf @@ -62,3 +62,30 @@ variable "app_name" { default = "ads" type = string } + + +#--------------------------------------------------------------- +# Override individual resource names +#--------------------------------------------------------------- + +variable "webapp_name" { + description = "The override name for the web app service. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "functionapp_name" { + description = "The override name for the function app service resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} + +variable "aad_webapp_name" { + description = "The override name for the AAD App registration for the web app. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "aad_functionapp_name" { + description = "The override name for the AAD App registration for the function app. 
If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_prepare/.gitignore b/solution/DeploymentV2/terraform_prepare/.gitignore new file mode 100644 index 00000000..cc143939 --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/.gitignore @@ -0,0 +1,39 @@ +# Local .terraform directories +**/.terraform/* + +**/arkahna/* + +# .tfstate files +*.tfstate +*.tfstate.* + +# Crash log files +crash.log + +# Exclude all .tfvars files, which are likely to contain sentitive data, such as +# password, private keys, and other secrets. These should not be part of version +# control as they are data points which are potentially sensitive and subject +# to change depending on the environment. +# +*.tfvars + +# Ignore override files as they are usually used to override resources locally and so +# are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Include override files you do wish to add to version control using negated pattern +# +# !example_override.tf + +# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan +# example: *tfplan* + +# Ignore CLI configuration files +.terraformrc +terraform.rc + + +backend.tf diff --git a/solution/DeploymentV2/terraform_prepare/.terraform.lock.hcl b/solution/DeploymentV2/terraform_prepare/.terraform.lock.hcl new file mode 100644 index 00000000..dc07510c --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/.terraform.lock.hcl @@ -0,0 +1,80 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. 
+ +provider "registry.terraform.io/hashicorp/azuread" { + version = "2.22.0" + constraints = "2.22.0" + hashes = [ + "h1:so17lrrqkdZcmQp5V/hvY5vLXw1BmwQMnlvGcRq/u0c=", + "zh:062d84c514cd5015af60693ca4f3aece80d358fd7172951546eaba8093065c5b", + "zh:13749654ccd901408c74de2e1d7de43157044c4e739edcc0a66012a6cc6bba7a", + "zh:138c107f6aa554924a241806bca69248af1b7ce79ec93c6eef369886f33eef0a", + "zh:1c3e89cf19118fc07d7b04257251fc9897e722c16e0a0df7b07fcd261f8c12e7", + "zh:33c656e07492808da0584717a3cd52377dff15ae0f1f5f411321b8de08a7693e", + "zh:4e08570e51742e717a914db5dd15c0a73cd1686e0c1f1a07123d3aa70cc00718", + "zh:4fef3aca24238cead0798d29196c9e2270622091897dba040c21500c2ddb4095", + "zh:614c60e3dfdd17b7d93b9355e057c825bb36e61f5bc25ccbc6550ff7bd726b65", + "zh:65d8789b8b088322d4e27ea6cd9935749980fe0a1b94e8e56f0cca35c34c394e", + "zh:823abd9bbd9f42bc4c5769be033bf734bb81bb20152b7e1c009a6234b849e5b6", + "zh:9c7ece6b3c65253bfef6ee29acc0cac033ec061bd6755c5496a7e5c17997c918", + "zh:fc0ff3e3104ee6e89c2fa3bf6c83ba698062e64165b60acfe7ad00f2161d1250", + ] +} + +provider "registry.terraform.io/hashicorp/azurerm" { + version = "3.12.0" + constraints = "3.12.0" + hashes = [ + "h1:KF6bIhK7POPuO1HYK1G8b5Fae+0n8c/gM+9EBVJJQ2Q=", + "zh:0bbc93276a38da205d2b8ce34a2f813e96687a2f6fc7addd9bb05b85dab2a662", + "zh:3af12159e0b5217a7b35f081fba1e34ac8fb995acc7e6d2ec86592a559eb85c8", + "zh:7d1bdc9b4d9b1990409d52cb915e5acbe17bd81b29d28f7fcdaaf96003dca77c", + "zh:81ab77524cfa91aed929e35e2ed63b2ac73add7c33d1b3d5cdc21937606ecc7c", + "zh:84ddddd9f4c695199ef2824eea853d29434e164e0ef3603451aed39d8852ba28", + "zh:9905a5ca2d7c5c6e43a4be1f7b207d584860ec4ddad1aaa475fb03a731333751", + "zh:9cdf3223d9f4a2dbabcd1ebc663beab356a4ee5b1f584088772da8271c74890b", + "zh:a8317436ec286aae91d9bfbcd36edb9b0b58c195a9cd0adffb7f588f124bef1e", + "zh:cea079d3f4eff9e301ca207c7ce676553f9acc3202abf88ff161d6faa1e1a54a", + "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", + 
"zh:fdaa4de7d6713bd8b1b4a51135c9eadbaa317ea87e7af9c00c52f67018fba288", + "zh:ff25a0a5fb54174a8a37d4e40413fa85737d8bb837c5635b6e88621c36c202bd", + ] +} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.3.0" + constraints = ">= 2.2.0, 3.3.0" + hashes = [ + "h1:4VU/t0rwHuvJI0JZ3Zd93uEWaKIWeXqKx1GhAhgTn6A=", + "zh:0148a1a98ddbc3cf6ad6ef7bb4e5a2820ca50fdb8723d4448a011bfabb6f3d7c", + "zh:1f8c6d2046d6ea626c7abcfca2fbb95dce21663053a098570ebef71433f4a001", + "zh:3681788777b6b191edc5d2aeaece6217f36c1f92fcd2478bf804185f9fc48f9f", + "zh:3e8f7ae388fe981f86b5f6d4636e2b8ddb98b4cec63330f24b04c408ca338fa3", + "zh:3eb6fadea3a905a3e8be63cf3fd9c2dc1a885a8a4d67ac6945b4e562b22ce2d5", + "zh:46761443b5a83bce53a9e8dbb88a60ee260b1825f6e265dfb8865b9ab552ef0b", + "zh:59edb583bfe9ae60023289c570e62a87c86649341fd5e1042adc592334459967", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:8c46658d69142562984be3c6aa9ea29b2d389f8c991197b722c550e8a34fe49c", + "zh:9923e10598c76078cd6b67962aeb0c65160273e4fb36134a994003d1e7375200", + "zh:d528eb4854d5fb529934e0de3b57d33bf8a19db302c5cba6e8292e674291aaeb", + "zh:e9be013d175b21debee2b626574883aa579e4b03a085ca4e4122dd6ae2ffec53", + ] +} + +provider "registry.terraform.io/hashicorp/time" { + version = "0.7.2" + hashes = [ + "h1:YYLAfhMFP5nhV2iZPslqsLkZN+6sZo7gMJW7pLcLfM8=", + "zh:0bbe0158c2a9e3f5be911b7e94477586110c51746bb13d102054f22754565bda", + "zh:3250af7fd49b8aaf2ccc895588af05197d886e38b727e3ba33bcbb8cc96ad34d", + "zh:35e4de0437f4fa9c1ad69aaf8136413be2369ea607d78e04bb68dc66a6a520b8", + "zh:369756417a6272e79cad31eb2c82c202f6a4b6e4204a893f656644ba9e149fa2", + "zh:390370f1179d89b33c3a0731691e772d5450a7d59fc66671ec625e201db74aa2", + "zh:3d12ac905259d225c685bc42e5507ed0fbdaa5a09c30dce7c1932d908df857f7", + "zh:75f63e5e1c68e6c5bccba4568c3564e2774eb3a7a19189eb8e2b6e0d58c8f8cc", + "zh:7c22a2078a608e3e0278c4cbc9c483909062ebd1843bddaf8f176346c6d378b1", + 
"zh:7cfb3c02f78f0060d59c757c4726ab45a962ce4a9cf4833beca704a1020785bd", + "zh:a0325917f47c28a2ed088dedcea0d9520d91b264e63cc667fe4336ac993c0c11", + "zh:c181551d4c0a40b52e236f1755cc340aeca0fb5dcfd08b3b1c393a7667d2f327", + ] +} diff --git a/solution/DeploymentV2/terraform_prepare/app_service.tf b/solution/DeploymentV2/terraform_prepare/app_service.tf new file mode 100644 index 00000000..f21ab736 --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/app_service.tf @@ -0,0 +1,74 @@ + +resource "random_uuid" "app_reg_role_id" {} +resource "random_uuid" "app_reg_role_id2" {} + +resource "azuread_application" "web_reg" { + count = var.deploy_web_app && var.deploy_azure_ad_web_app_registration ? 1 : 0 + display_name = local.aad_webapp_name + owners = [data.azurerm_client_config.current.object_id] + web { + homepage_url = local.webapp_url + redirect_uris = ["${local.webapp_url}/signin-oidc", "https://localhost:44385/signin-oidc"] + implicit_grant { + access_token_issuance_enabled = false + id_token_issuance_enabled = true + } + } + app_role { + allowed_member_types = ["User"] + id = random_uuid.app_reg_role_id.result + description = "Administer features of the application" + display_name = "Administrator" + enabled = true + value = "Administrator" + } + + app_role { + allowed_member_types = ["User"] + id = random_uuid.app_reg_role_id2.result + description = "Reader features of the application" + display_name = "Reader" + enabled = true + value = "Reader" + } + + required_resource_access { + resource_app_id = "00000003-0000-0000-c000-000000000000" + + resource_access { + id = "b340eb25-3456-403f-be2f-af7a0d370277" + type = "Scope" + } + resource_access { + id = "37f7f235-527c-4136-accd-4a02d197296e" + type = "Scope" + } + resource_access { + id = "14dad69e-099b-42c9-810b-d002981feec1" + type = "Scope" + } + resource_access { + id = "7ab1d382-f21e-4acd-a863-ba3e13f7da61" + type = "Role" + } + resource_access { + id = "98830695-27a2-44f7-8c18-0c3ebc9698f6" + type = "Role" 
+ } + } +} + +resource "time_sleep" "wait_30_seconds" { + depends_on = [azuread_application.web_reg] + create_duration = "30s" +} + +resource "azuread_service_principal" "web_sp" { + count = var.deploy_web_app && var.deploy_azure_ad_web_app_registration ? 1 : 0 + application_id = azuread_application.web_reg[0].application_id + owners = [data.azurerm_client_config.current.object_id] + depends_on = [time_sleep.wait_30_seconds] +} + + + diff --git a/solution/DeploymentV2/terraform_prepare/function_app.tf b/solution/DeploymentV2/terraform_prepare/function_app.tf new file mode 100644 index 00000000..c94ec1e3 --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/function_app.tf @@ -0,0 +1,39 @@ +resource "random_uuid" "function_app_reg_role_id" {} + +# This is used for auth in the Azure Function +resource "azuread_application" "function_app_reg" { + count = var.deploy_azure_ad_function_app_registration ? 1 : 0 + owners = [data.azurerm_client_config.current.object_id] + identifier_uris = ["api://${local.functionapp_name}"] + display_name = local.aad_functionapp_name + web { + homepage_url = local.functionapp_url + implicit_grant { + access_token_issuance_enabled = false + } + } + app_role { + allowed_member_types = ["Application", "User"] + id = random_uuid.function_app_reg_role_id.result + description = "Used to applications to call the ADS Go Fast functions" + display_name = "FunctionAPICaller" + enabled = true + value = "FunctionAPICaller" + } + required_resource_access { + resource_app_id = "00000003-0000-0000-c000-000000000000" + resource_access { + id = "b340eb25-3456-403f-be2f-af7a0d370277" + type = "Scope" + } + } +} + +resource "azuread_service_principal" "function_app" { + count = var.deploy_azure_ad_function_app_registration ? 
1 : 0 + owners = [data.azurerm_client_config.current.object_id] + application_id = azuread_application.function_app_reg[0].application_id +} + + + diff --git a/solution/DeploymentV2/terraform_prepare/locals.tf b/solution/DeploymentV2/terraform_prepare/locals.tf new file mode 100644 index 00000000..2de29d5d --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/locals.tf @@ -0,0 +1,20 @@ +locals { + webapp_name = (var.webapp_name != "" ? var.webapp_name : module.naming.app_service.name_unique) + webapp_url = "https://${local.webapp_name}.azurewebsites.net" + functionapp_name = (var.functionapp_name != "" ? var.functionapp_name : module.naming.function_app.name_unique) + functionapp_url = "https://${local.functionapp_name}.azurewebsites.net" + aad_webapp_name = (var.aad_webapp_name != "" ? var.aad_webapp_name : "ADS GoFast Web Portal (${var.environment_tag})") + aad_functionapp_name = (var.aad_functionapp_name != "" ? var.aad_functionapp_name : "ADS GoFast Orchestration App (${var.environment_tag})") + + + tags = { + Environment = var.environment_tag + Owner = var.owner_tag + Author = var.author_tag + Application = var.app_name + CreatedDate = timestamp() + } + +} + + diff --git a/solution/DeploymentV2/terraform_prepare/main.tf b/solution/DeploymentV2/terraform_prepare/main.tf new file mode 100644 index 00000000..e8db32f1 --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/main.tf @@ -0,0 +1,50 @@ +# Configure the Azure provider +terraform { + required_providers { + azurerm = { + source = "hashicorp/azurerm" + version = "=3.12.0" + } + azuread = { + source = "hashicorp/azuread" + version = "=2.22.0" + } + random = { + source = "hashicorp/random" + version = "=3.3.0" + } + } +} + +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = true + } + } + subscription_id = var.subscription_id + skip_provider_registration = true +} + +provider "azuread" { + tenant_id = var.tenant_id +} + +data "azurerm_client_config" "current" { +} + 
+module "naming" { + source = "Azure/naming/azurerm" + version = "0.1.1" + prefix = [ + var.prefix, + var.environment_tag + ] + suffix = [ + var.app_name + ] +} + +resource "random_id" "rg_deployment_unique" { + byte_length = 4 +} diff --git a/solution/DeploymentV2/terraform_prepare/vars.tf b/solution/DeploymentV2/terraform_prepare/vars.tf new file mode 100644 index 00000000..384a9958 --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/vars.tf @@ -0,0 +1,121 @@ +#--------------------------------------------------------------- +# Provider details +#--------------------------------------------------------------- +variable "ip_address" { + description = "The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets" + type = string +} + +variable "tenant_id" { + description = "The AAD tenant ID" + type = string +} + +variable "subscription_id" { + description = "The Azure Subscription ID" + type = string +} + +variable "resource_location" { + description = "The Azure Region being deployed to." + type = string + default = "Australia East" +} + +variable "resource_group_name" { + type = string +} +#--------------------------------------------------------------- +# Tags +#--------------------------------------------------------------- + +variable "owner_tag" { + description = "The tags to apply to resources." + type = string + default = "opensource.microsoft.com" +} + +variable "author_tag" { + description = "The tags to apply to resources." + type = string + default = "opensource.microsoft.com" +} + +variable "environment_tag" { + description = "The name of the environment. 
Don't use spaces" + default = "dev" + type = string +} + + + +#--------------------------------------------------------------- +# Naming Prefix Settings +#--------------------------------------------------------------- +variable "prefix" { + description = "The prefix value to be used for autogenerated naming conventions" + default = "ark" + type = string +} +variable "app_name" { + description = "The app_name suffix value to be used for autogenerated naming conventions" + default = "ads" + type = string +} + + +#--------------------------------------------------------------- +# Override individual resource names +#--------------------------------------------------------------- + +variable "webapp_name" { + description = "The override name for the web app service. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "functionapp_name" { + description = "The override name for the function app service resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} + +variable "aad_webapp_name" { + description = "The override name for the AAD App registration for the web app. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "aad_functionapp_name" { + description = "The override name for the AAD App registration for the function app. 
If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} + + + +#--------------------------------------------------------------- +# Feature Toggles +#--------------------------------------------------------------- + + +variable "deploy_web_app" { + description = "Feature toggle for deploying the Web App" + default = true + type = bool +} +variable "deploy_function_app" { + description = "Feature toggle for deploying the Function App" + default = true + type = bool +} + + +variable "deploy_azure_ad_web_app_registration" { + description = "Feature toggle for deploying the Azure AD App registration for the Web Portal" + default = true + type = bool +} +variable "deploy_azure_ad_function_app_registration" { + description = "Feature toggle for deploying the Azure AD App registration for the Function App" + default = true + type = bool +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_prepare/vars/admz/terragrunt.hcl b/solution/DeploymentV2/terraform_prepare/vars/admz/terragrunt.hcl new file mode 100644 index 00000000..c027ad94 --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/vars/admz/terragrunt.hcl @@ -0,0 +1,30 @@ +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage account here. + # You should have created these with the Prepare.ps1 script. 
+ resource_group_name = "dlzdev08lite" + storage_account_name = "teststatedev08litestate" + container_name = "tstate" + key = "customisations.tfstate" + } +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "ark" # All azure resources will be prefixed with this + domain = "arkahna.io" # Used when configuring AAD config for Azure functions + tenant_id = "0fee3d31-b963-4a1c-8f4a-ca367205aa65" # This is the Azure AD tenant ID + subscription_id = "14f299e1-be54-43e9-bf5e-696840f86fc4" # The azure subscription id to deploy to + resource_location = "Australia East" # The location of the resources + resource_group_name = "dlzdev08lite" # The resource group all resources will be deployed to + owner_tag = "Arkahna" # Owner tag value for Azure resources + environment_tag = "prod" # This is used on Azure tags as well as all resource names + ip_address = "101.179.193.89" # This is the ip address of the agent/current IP. Used to create firewall exemptions. +} diff --git a/solution/DeploymentV2/terraform_prepare/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_prepare/vars/local/terragrunt.hcl new file mode 100644 index 00000000..8454a8e3 --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/vars/local/terragrunt.hcl @@ -0,0 +1,32 @@ +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage account here. + # You should have created these with the Prepare.ps1 script. 
+ resource_group_name = "gft2" + storage_account_name = "gft2state" + container_name = "tstate" + key = "terraform_prepare.tfstate" + } +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "ads" # All azure resources will be prefixed with this + domain = "microsoft.com" # Used when configuring AAD config for Azure functions + tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID + subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure subscription id to deploy to + resource_location = "Australia East" # The location of the resources + resource_group_name = "gft2" # The resource group all resources will be deployed to + owner_tag = "Contoso" # Owner tag value for Azure resources + environment_tag = "stg" # This is used on Azure tags as well as all resource names + ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + deploy_web_app = true + deploy_function_app = true +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_prepare/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_prepare/vars/production/terragrunt.hcl new file mode 100644 index 00000000..3c137bb4 --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/vars/production/terragrunt.hcl @@ -0,0 +1,33 @@ +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage account here. + # You should have created these with the Prepare.ps1 script. 
+ resource_group_name = "gft2" + storage_account_name = "gft2state" + container_name = "tstate" + key = "terraform_prepare.tfstate" + } +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "ads" # All azure resources will be prefixed with this + domain = "microsoft.com" # Used when configuring AAD config for Azure functions + tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID + subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure subscription id to deploy to + resource_location = "Australia East" # The location of the resources + resource_group_name = "gft2" # The resource group all resources will be deployed to + owner_tag = "Contoso" # Owner tag value for Azure resources + environment_tag = "stg" # This is used on Azure tags as well as all resource names + ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + deploy_web_app = true + deploy_function_app = true +} + diff --git a/solution/DeploymentV2/terraform_prepare/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_prepare/vars/staging/terragrunt.hcl new file mode 100644 index 00000000..0f8e2bbb --- /dev/null +++ b/solution/DeploymentV2/terraform_prepare/vars/staging/terragrunt.hcl @@ -0,0 +1,32 @@ +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage account here. + # You should have created these with the Prepare.ps1 script. 
+ resource_group_name = "gft2" + storage_account_name = "gft2state" + container_name = "tstate" + key = "terraform_prepare.tfstate" + } +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "ads" # All azure resources will be prefixed with this + domain = "microsoft.com" # Used when configuring AAD config for Azure functions + tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID + subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure subscription id to deploy to + resource_location = "Australia East" # The location of the resources + resource_group_name = "gft2" # The resource group all resources will be deployed to + owner_tag = "Contoso" # Owner tag value for Azure resources + environment_tag = "stg" # This is used on Azure tags as well as all resource names + ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
+ deploy_web_app = true + deploy_function_app = true +} From d3b69ecb2c78740c4e875b72cbd1bf9ecf2d1a7b Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 23 Jul 2022 08:09:29 +0800 Subject: [PATCH 039/151] Breaking up terraform into layers --- .../.gitignore | 0 .../.terraform.lock.hcl | 0 .../app_service.tf | 0 .../function_app.tf | 0 .../locals.tf | 0 .../main.tf | 0 .../DeploymentV2/terraform_layer1/outputs.tf | 31 +++++ .../vars.tf | 0 .../vars/admz/terragrunt.hcl | 0 .../vars/local/terragrunt.hcl | 2 +- .../vars/production/terragrunt.hcl | 2 +- .../vars/staging/terragrunt.hcl | 2 +- .../DeploymentV2/terraform_layer2/.gitignore | 39 ++++++ .../terraform_layer2/.terraform.lock.hcl | 62 +++++++++ .../DeploymentV2/terraform_layer2/layer1.tf | 12 ++ .../DeploymentV2/terraform_layer2/locals.tf | 20 +++ .../DeploymentV2/terraform_layer2/main.tf | 50 ++++++++ .../DeploymentV2/terraform_layer2/outputs.tf | 11 ++ .../DeploymentV2/terraform_layer2/vars.tf | 121 ++++++++++++++++++ .../terraform_layer2/vars/admz/terragrunt.hcl | 30 +++++ .../vars/local/terragrunt.hcl | 32 +++++ .../vars/production/terragrunt.hcl | 33 +++++ .../vars/staging/terragrunt.hcl | 32 +++++ 23 files changed, 476 insertions(+), 3 deletions(-) rename solution/DeploymentV2/{terraform_prepare => terraform_layer1}/.gitignore (100%) rename solution/DeploymentV2/{terraform_prepare => terraform_layer1}/.terraform.lock.hcl (100%) rename solution/DeploymentV2/{terraform_prepare => terraform_layer1}/app_service.tf (100%) rename solution/DeploymentV2/{terraform_prepare => terraform_layer1}/function_app.tf (100%) rename solution/DeploymentV2/{terraform_prepare => terraform_layer1}/locals.tf (100%) rename solution/DeploymentV2/{terraform_prepare => terraform_layer1}/main.tf (100%) create mode 100644 solution/DeploymentV2/terraform_layer1/outputs.tf rename solution/DeploymentV2/{terraform_prepare => terraform_layer1}/vars.tf (100%) rename solution/DeploymentV2/{terraform_prepare => 
terraform_layer1}/vars/admz/terragrunt.hcl (100%) rename solution/DeploymentV2/{terraform_prepare => terraform_layer1}/vars/local/terragrunt.hcl (96%) rename solution/DeploymentV2/{terraform_prepare => terraform_layer1}/vars/production/terragrunt.hcl (95%) rename solution/DeploymentV2/{terraform_prepare => terraform_layer1}/vars/staging/terragrunt.hcl (97%) create mode 100644 solution/DeploymentV2/terraform_layer2/.gitignore create mode 100644 solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl create mode 100644 solution/DeploymentV2/terraform_layer2/layer1.tf create mode 100644 solution/DeploymentV2/terraform_layer2/locals.tf create mode 100644 solution/DeploymentV2/terraform_layer2/main.tf create mode 100644 solution/DeploymentV2/terraform_layer2/outputs.tf create mode 100644 solution/DeploymentV2/terraform_layer2/vars.tf create mode 100644 solution/DeploymentV2/terraform_layer2/vars/admz/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl diff --git a/solution/DeploymentV2/terraform_prepare/.gitignore b/solution/DeploymentV2/terraform_layer1/.gitignore similarity index 100% rename from solution/DeploymentV2/terraform_prepare/.gitignore rename to solution/DeploymentV2/terraform_layer1/.gitignore diff --git a/solution/DeploymentV2/terraform_prepare/.terraform.lock.hcl b/solution/DeploymentV2/terraform_layer1/.terraform.lock.hcl similarity index 100% rename from solution/DeploymentV2/terraform_prepare/.terraform.lock.hcl rename to solution/DeploymentV2/terraform_layer1/.terraform.lock.hcl diff --git a/solution/DeploymentV2/terraform_prepare/app_service.tf b/solution/DeploymentV2/terraform_layer1/app_service.tf similarity index 100% rename from solution/DeploymentV2/terraform_prepare/app_service.tf rename to 
solution/DeploymentV2/terraform_layer1/app_service.tf diff --git a/solution/DeploymentV2/terraform_prepare/function_app.tf b/solution/DeploymentV2/terraform_layer1/function_app.tf similarity index 100% rename from solution/DeploymentV2/terraform_prepare/function_app.tf rename to solution/DeploymentV2/terraform_layer1/function_app.tf diff --git a/solution/DeploymentV2/terraform_prepare/locals.tf b/solution/DeploymentV2/terraform_layer1/locals.tf similarity index 100% rename from solution/DeploymentV2/terraform_prepare/locals.tf rename to solution/DeploymentV2/terraform_layer1/locals.tf diff --git a/solution/DeploymentV2/terraform_prepare/main.tf b/solution/DeploymentV2/terraform_layer1/main.tf similarity index 100% rename from solution/DeploymentV2/terraform_prepare/main.tf rename to solution/DeploymentV2/terraform_layer1/main.tf diff --git a/solution/DeploymentV2/terraform_layer1/outputs.tf b/solution/DeploymentV2/terraform_layer1/outputs.tf new file mode 100644 index 00000000..5dadefbf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer1/outputs.tf @@ -0,0 +1,31 @@ +output "tenant_id" { + value = var.tenant_id +} + +output "resource_group_name" { + value = var.resource_group_name +} + +output "function_app_reg" { + value = azuread_application.function_app_reg[0] +} + +output "aad_funcreg_id" { + value = var.deploy_function_app ? azuread_application.function_app_reg[0].application_id : "" +} + +output "aad_webreg_id" { + value = var.deploy_web_app ? 
azuread_application.web_reg[0].application_id : "" +} + +output "webapp_name" { + value = local.webapp_name +} + +output "functionapp_name" { + value = local.functionapp_name +} + +output "rg_deployment_unique" { + value = random_id.rg_deployment_unique +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_prepare/vars.tf b/solution/DeploymentV2/terraform_layer1/vars.tf similarity index 100% rename from solution/DeploymentV2/terraform_prepare/vars.tf rename to solution/DeploymentV2/terraform_layer1/vars.tf diff --git a/solution/DeploymentV2/terraform_prepare/vars/admz/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/admz/terragrunt.hcl similarity index 100% rename from solution/DeploymentV2/terraform_prepare/vars/admz/terragrunt.hcl rename to solution/DeploymentV2/terraform_layer1/vars/admz/terragrunt.hcl diff --git a/solution/DeploymentV2/terraform_prepare/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl similarity index 96% rename from solution/DeploymentV2/terraform_prepare/vars/local/terragrunt.hcl rename to solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl index 8454a8e3..76f9a888 100644 --- a/solution/DeploymentV2/terraform_prepare/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl @@ -10,7 +10,7 @@ remote_state { resource_group_name = "gft2" storage_account_name = "gft2state" container_name = "tstate" - key = "terraform_prepare.tfstate" + key = "terraform_layer1.tfstate" } } diff --git a/solution/DeploymentV2/terraform_prepare/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl similarity index 95% rename from solution/DeploymentV2/terraform_prepare/vars/production/terragrunt.hcl rename to solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl index 3c137bb4..7224486d 100644 --- a/solution/DeploymentV2/terraform_prepare/vars/production/terragrunt.hcl +++ 
b/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl @@ -10,7 +10,7 @@ remote_state { resource_group_name = "gft2" storage_account_name = "gft2state" container_name = "tstate" - key = "terraform_prepare.tfstate" + key = "terraform_layer1.tfstate" } } diff --git a/solution/DeploymentV2/terraform_prepare/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl similarity index 97% rename from solution/DeploymentV2/terraform_prepare/vars/staging/terragrunt.hcl rename to solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl index 0f8e2bbb..f977573f 100644 --- a/solution/DeploymentV2/terraform_prepare/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl @@ -10,7 +10,7 @@ remote_state { resource_group_name = "gft2" storage_account_name = "gft2state" container_name = "tstate" - key = "terraform_prepare.tfstate" + key = "terraform_layer1.tfstate" } } diff --git a/solution/DeploymentV2/terraform_layer2/.gitignore b/solution/DeploymentV2/terraform_layer2/.gitignore new file mode 100644 index 00000000..cc143939 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/.gitignore @@ -0,0 +1,39 @@ +# Local .terraform directories +**/.terraform/* + +**/arkahna/* + +# .tfstate files +*.tfstate +*.tfstate.* + +# Crash log files +crash.log + +# Exclude all .tfvars files, which are likely to contain sensitive data, such as +# passwords, private keys, and other secrets. These should not be part of version +# control as they are data points which are potentially sensitive and subject +# to change depending on the environment. 
+# +*.tfvars + +# Ignore override files as they are usually used to override resources locally and so +# are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Include override files you do wish to add to version control using negated pattern +# +# !example_override.tf + +# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan +# example: *tfplan* + +# Ignore CLI configuration files +.terraformrc +terraform.rc + + +backend.tf diff --git a/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl b/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl new file mode 100644 index 00000000..8ca5626b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl @@ -0,0 +1,62 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/azuread" { + version = "2.22.0" + constraints = "2.22.0" + hashes = [ + "h1:so17lrrqkdZcmQp5V/hvY5vLXw1BmwQMnlvGcRq/u0c=", + "zh:062d84c514cd5015af60693ca4f3aece80d358fd7172951546eaba8093065c5b", + "zh:13749654ccd901408c74de2e1d7de43157044c4e739edcc0a66012a6cc6bba7a", + "zh:138c107f6aa554924a241806bca69248af1b7ce79ec93c6eef369886f33eef0a", + "zh:1c3e89cf19118fc07d7b04257251fc9897e722c16e0a0df7b07fcd261f8c12e7", + "zh:33c656e07492808da0584717a3cd52377dff15ae0f1f5f411321b8de08a7693e", + "zh:4e08570e51742e717a914db5dd15c0a73cd1686e0c1f1a07123d3aa70cc00718", + "zh:4fef3aca24238cead0798d29196c9e2270622091897dba040c21500c2ddb4095", + "zh:614c60e3dfdd17b7d93b9355e057c825bb36e61f5bc25ccbc6550ff7bd726b65", + "zh:65d8789b8b088322d4e27ea6cd9935749980fe0a1b94e8e56f0cca35c34c394e", + "zh:823abd9bbd9f42bc4c5769be033bf734bb81bb20152b7e1c009a6234b849e5b6", + "zh:9c7ece6b3c65253bfef6ee29acc0cac033ec061bd6755c5496a7e5c17997c918", + "zh:fc0ff3e3104ee6e89c2fa3bf6c83ba698062e64165b60acfe7ad00f2161d1250", + ] +} + +provider "registry.terraform.io/hashicorp/azurerm" { + version = 
"3.12.0" + constraints = "3.12.0" + hashes = [ + "h1:KF6bIhK7POPuO1HYK1G8b5Fae+0n8c/gM+9EBVJJQ2Q=", + "zh:0bbc93276a38da205d2b8ce34a2f813e96687a2f6fc7addd9bb05b85dab2a662", + "zh:3af12159e0b5217a7b35f081fba1e34ac8fb995acc7e6d2ec86592a559eb85c8", + "zh:7d1bdc9b4d9b1990409d52cb915e5acbe17bd81b29d28f7fcdaaf96003dca77c", + "zh:81ab77524cfa91aed929e35e2ed63b2ac73add7c33d1b3d5cdc21937606ecc7c", + "zh:84ddddd9f4c695199ef2824eea853d29434e164e0ef3603451aed39d8852ba28", + "zh:9905a5ca2d7c5c6e43a4be1f7b207d584860ec4ddad1aaa475fb03a731333751", + "zh:9cdf3223d9f4a2dbabcd1ebc663beab356a4ee5b1f584088772da8271c74890b", + "zh:a8317436ec286aae91d9bfbcd36edb9b0b58c195a9cd0adffb7f588f124bef1e", + "zh:cea079d3f4eff9e301ca207c7ce676553f9acc3202abf88ff161d6faa1e1a54a", + "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", + "zh:fdaa4de7d6713bd8b1b4a51135c9eadbaa317ea87e7af9c00c52f67018fba288", + "zh:ff25a0a5fb54174a8a37d4e40413fa85737d8bb837c5635b6e88621c36c202bd", + ] +} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.3.0" + constraints = ">= 2.2.0, 3.3.0" + hashes = [ + "h1:4VU/t0rwHuvJI0JZ3Zd93uEWaKIWeXqKx1GhAhgTn6A=", + "zh:0148a1a98ddbc3cf6ad6ef7bb4e5a2820ca50fdb8723d4448a011bfabb6f3d7c", + "zh:1f8c6d2046d6ea626c7abcfca2fbb95dce21663053a098570ebef71433f4a001", + "zh:3681788777b6b191edc5d2aeaece6217f36c1f92fcd2478bf804185f9fc48f9f", + "zh:3e8f7ae388fe981f86b5f6d4636e2b8ddb98b4cec63330f24b04c408ca338fa3", + "zh:3eb6fadea3a905a3e8be63cf3fd9c2dc1a885a8a4d67ac6945b4e562b22ce2d5", + "zh:46761443b5a83bce53a9e8dbb88a60ee260b1825f6e265dfb8865b9ab552ef0b", + "zh:59edb583bfe9ae60023289c570e62a87c86649341fd5e1042adc592334459967", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:8c46658d69142562984be3c6aa9ea29b2d389f8c991197b722c550e8a34fe49c", + "zh:9923e10598c76078cd6b67962aeb0c65160273e4fb36134a994003d1e7375200", + "zh:d528eb4854d5fb529934e0de3b57d33bf8a19db302c5cba6e8292e674291aaeb", + 
"zh:e9be013d175b21debee2b626574883aa579e4b03a085ca4e4122dd6ae2ffec53", + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/layer1.tf b/solution/DeploymentV2/terraform_layer2/layer1.tf new file mode 100644 index 00000000..d315121b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/layer1.tf @@ -0,0 +1,12 @@ +data "terraform_remote_state" "layer1" { + # The settings here should match the "backend" settings in the + # configuration that manages the network resources. + backend = "azurerm" + + config = { + container_name = "tstate" + key = "terraform_layer1.tfstate" + resource_group_name = "gft2" + storage_account_name = "gft2state" + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/locals.tf b/solution/DeploymentV2/terraform_layer2/locals.tf new file mode 100644 index 00000000..2de29d5d --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/locals.tf @@ -0,0 +1,20 @@ +locals { + webapp_name = (var.webapp_name != "" ? var.webapp_name : module.naming.app_service.name_unique) + webapp_url = "https://${local.webapp_name}.azurewebsites.net" + functionapp_name = (var.functionapp_name != "" ? var.functionapp_name : module.naming.function_app.name_unique) + functionapp_url = "https://${local.functionapp_name}.azurewebsites.net" + aad_webapp_name = (var.aad_webapp_name != "" ? var.aad_webapp_name : "ADS GoFast Web Portal (${var.environment_tag})") + aad_functionapp_name = (var.aad_functionapp_name != "" ? 
var.aad_functionapp_name : "ADS GoFast Orchestration App (${var.environment_tag})") + + + tags = { + Environment = var.environment_tag + Owner = var.owner_tag + Author = var.author_tag + Application = var.app_name + CreatedDate = timestamp() + } + +} + + diff --git a/solution/DeploymentV2/terraform_layer2/main.tf b/solution/DeploymentV2/terraform_layer2/main.tf new file mode 100644 index 00000000..e8db32f1 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/main.tf @@ -0,0 +1,50 @@ +# Configure the Azure provider +terraform { + required_providers { + azurerm = { + source = "hashicorp/azurerm" + version = "=3.12.0" + } + azuread = { + source = "hashicorp/azuread" + version = "=2.22.0" + } + random = { + source = "hashicorp/random" + version = "=3.3.0" + } + } +} + +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = true + } + } + subscription_id = var.subscription_id + skip_provider_registration = true +} + +provider "azuread" { + tenant_id = var.tenant_id +} + +data "azurerm_client_config" "current" { +} + +module "naming" { + source = "Azure/naming/azurerm" + version = "0.1.1" + prefix = [ + var.prefix, + var.environment_tag + ] + suffix = [ + var.app_name + ] +} + +resource "random_id" "rg_deployment_unique" { + byte_length = 4 +} diff --git a/solution/DeploymentV2/terraform_layer2/outputs.tf b/solution/DeploymentV2/terraform_layer2/outputs.tf new file mode 100644 index 00000000..21e32368 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/outputs.tf @@ -0,0 +1,11 @@ +output "tenant_id" { + value = var.tenant_id +} + +output "resource_group_name" { + value = var.resource_group_name +} + +output "aad_funcreg_id" { + value = data.terraform_remote_state.layer1.outputs.aad_funcreg_id +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/vars.tf b/solution/DeploymentV2/terraform_layer2/vars.tf new file mode 100644 index 00000000..384a9958 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/vars.tf @@ -0,0 +1,121 @@ +#--------------------------------------------------------------- +# Provider details +#--------------------------------------------------------------- +variable "ip_address" { + description = "The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets" + type = string +} + +variable "tenant_id" { + description = "The AAD tenant ID" + type = string +} + +variable "subscription_id" { + description = "The Azure Subscription ID" + type = string +} + +variable "resource_location" { + description = "The Azure Region being deployed to." + type = string + default = "Australia East" +} + +variable "resource_group_name" { + type = string +} +#--------------------------------------------------------------- +# Tags +#--------------------------------------------------------------- + +variable "owner_tag" { + description = "The tags to apply to resources." + type = string + default = "opensource.microsoft.com" +} + +variable "author_tag" { + description = "The tags to apply to resources." + type = string + default = "opensource.microsoft.com" +} + +variable "environment_tag" { + description = "The name of the environment. Don't use spaces" + default = "dev" + type = string +} + + + +#--------------------------------------------------------------- +# Naming Prefix Settings +#--------------------------------------------------------------- +variable "prefix" { + description = "The prefix value to be used for autogenerated naming conventions" + default = "ark" + type = string +} +variable "app_name" { + description = "The app_name suffix value to be used for autogenerated naming conventions" + default = "ads" + type = string +} + + +#--------------------------------------------------------------- +# Override individual resource names +#--------------------------------------------------------------- + +variable "webapp_name" { + description = "The override name for the web app service. 
If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "functionapp_name" { + description = "The override name for the function app service resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} + +variable "aad_webapp_name" { + description = "The override name for the AAD App registration for the web app. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "aad_functionapp_name" { + description = "The override name for the AAD App registration for the function app. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} + + + +#--------------------------------------------------------------- +# Feature Toggles +#--------------------------------------------------------------- + + +variable "deploy_web_app" { + description = "Feature toggle for deploying the Web App" + default = true + type = bool +} +variable "deploy_function_app" { + description = "Feature toggle for deploying the Function App" + default = true + type = bool +} + + +variable "deploy_azure_ad_web_app_registration" { + description = "Feature toggle for deploying the Azure AD App registration for the Web Portal" + default = true + type = bool +} +variable "deploy_azure_ad_function_app_registration" { + description = "Feature toggle for deploying the Azure AD App registration for the Function App" + default = true + type = bool +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/vars/admz/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/admz/terragrunt.hcl new file mode 100644 index 00000000..88029312 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/vars/admz/terragrunt.hcl @@ -0,0 +1,30 @@ +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage 
account here. + # You should have created these with the Prepare.ps1 script. + resource_group_name = "dlzdev08lite" + storage_account_name = "teststatedev08litestate" + container_name = "tstate" + key = "terraform_layer2.tfstate" + } +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "ark" # All azure resources will be prefixed with this + domain = "arkahna.io" # Used when configuring AAD config for Azure functions + tenant_id = "0fee3d31-b963-4a1c-8f4a-ca367205aa65" # This is the Azure AD tenant ID + subscription_id = "14f299e1-be54-43e9-bf5e-696840f86fc4" # The azure subscription id to deploy to + resource_location = "Australia East" # The location of the resources + resource_group_name = "dlzdev08lite" # The resource group all resources will be deployed to + owner_tag = "Arkahna" # Owner tag value for Azure resources + environment_tag = "prod" # This is used on Azure tags as well as all resource names + ip_address = "101.179.193.89" # This is the ip address of the agent/current IP. Used to create firewall exemptions. +} diff --git a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl new file mode 100644 index 00000000..e66904bb --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl @@ -0,0 +1,32 @@ +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage account here. + # You should have created these with the Prepare.ps1 script. 
+ resource_group_name = "gft2" + storage_account_name = "gft2state" + container_name = "tstate" + key = "terraform_layer2.tfstate" + } +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "ads" # All azure resources will be prefixed with this + domain = "microsoft.com" # Used when configuring AAD config for Azure functions + tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID + subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure subscription id to deploy to + resource_location = "Australia East" # The location of the resources + resource_group_name = "gft2" # The resource group all resources will be deployed to + owner_tag = "Contoso" # Owner tag value for Azure resources + environment_tag = "stg" # This is used on Azure tags as well as all resource names + ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + deploy_web_app = true + deploy_function_app = true +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl new file mode 100644 index 00000000..cf068612 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl @@ -0,0 +1,33 @@ +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage account here. + # You should have created these with the Prepare.ps1 script. 
+ resource_group_name = "gft2" + storage_account_name = "gft2state" + container_name = "tstate" + key = "terraform_layer2.tfstate" + } +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "ads" # All azure resources will be prefixed with this + domain = "microsoft.com" # Used when configuring AAD config for Azure functions + tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID + subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure subscription id to deploy to + resource_location = "Australia East" # The location of the resources + resource_group_name = "gft2" # The resource group all resources will be deployed to + owner_tag = "Contoso" # Owner tag value for Azure resources + environment_tag = "stg" # This is used on Azure tags as well as all resource names + ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + deploy_web_app = true + deploy_function_app = true +} + diff --git a/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl new file mode 100644 index 00000000..42a89ed4 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl @@ -0,0 +1,32 @@ +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage account here. + # You should have created these with the Prepare.ps1 script. 
+ resource_group_name = "gft2" + storage_account_name = "gft2state" + container_name = "tstate" + key = "terraform_layer2.tfstate" + } +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "ads" # All azure resources will be prefixed with this + domain = "microsoft.com" # Used when configuring AAD config for Azure functions + tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID + subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure subscription id to deploy to + resource_location = "Australia East" # The location of the resources + resource_group_name = "gft2" # The resource group all resources will be deployed to + owner_tag = "Contoso" # Owner tag value for Azure resources + environment_tag = "stg" # This is used on Azure tags as well as all resource names + ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
+ deploy_web_app = true + deploy_function_app = true +} From aeb2dfedccfee2d7160d062bece777a9ea86b01f Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 23 Jul 2022 16:31:19 +0800 Subject: [PATCH 040/151] Breaking up terraform into layers --- solution/DeploymentV2/Deploy.ps1 | 2 +- solution/DeploymentV2/terraform/layer1.tf | 13 + solution/DeploymentV2/terraform/main.tf | 1 + solution/DeploymentV2/terraform/outputs.tf | 8 + .../terraform/vars/staging/terragrunt.hcl | 21 +- .../DeploymentV2/terraform_layer1/outputs.tf | 29 +- .../DeploymentV2/terraform_layer1/readme.md | 9 + .../terraform_layer2/.terraform.lock.hcl | 19 + .../terraform_layer2/app_insights.tf | 27 + .../terraform_layer2/app_service.tf | 133 +++ .../terraform_layer2/app_service_plan.tf | 21 + .../DeploymentV2/terraform_layer2/bastion.tf | 56 ++ .../terraform_layer2/data_factory.tf | 156 ++++ .../terraform_layer2/data_factory_datasets.tf | 44 + .../data_factory_integration_runtimes.tf | 31 + .../data_factory_linked_services.tf | 567 +++++++++++++ .../data_factory_managed_private_endpoints.tf | 74 ++ .../data_factory_pipelines.tf | 50 ++ .../DeploymentV2/terraform_layer2/database.tf | 111 +++ .../terraform_layer2/function_app.tf | 127 +++ .../terraform_layer2/key_vault.tf | 281 +++++++ .../DeploymentV2/terraform_layer2/layer1.tf | 25 +- .../DeploymentV2/terraform_layer2/locals.tf | 102 ++- .../DeploymentV2/terraform_layer2/main.tf | 2 + .../modules/data_factory_datasets/README.md | 62 ++ .../arm/GDS_AzureBlobFS_Binary.json | 79 ++ .../arm/GDS_AzureBlobFS_DelimitedText.json | 91 +++ .../arm/GDS_AzureBlobFS_Excel.json | 96 +++ .../arm/GDS_AzureBlobFS_Json.json | 81 ++ .../arm/GDS_AzureBlobFS_Parquet.json | 81 ++ .../arm/GDS_AzureBlobStorage_Binary.json | 80 ++ .../GDS_AzureBlobStorage_DelimitedText.json | 90 +++ .../arm/GDS_AzureBlobStorage_Excel.json | 94 +++ .../arm/GDS_AzureBlobStorage_Json.json | 80 ++ .../arm/GDS_AzureBlobStorage_Parquet.json | 81 ++ .../arm/GDS_AzureSqlDWTable_NA.json | 77 ++ 
.../arm/GDS_AzureSqlTable_NA.json | 77 ++ .../arm/GDS_FileServer_Binary.json | 93 +++ .../arm/GDS_FileServer_Parquet.json | 93 +++ .../arm/GDS_OracleServerTable_NA.json | 105 +++ .../arm/GDS_Rest_Anonymous.json | 79 ++ .../arm/GDS_Rest_Basic.json | 108 +++ .../arm/GDS_Rest_OAuth2.json | 135 ++++ .../arm/GDS_Rest_ServicePrincipal.json | 126 +++ .../arm/GDS_SqlServerTable_NA.json | 98 +++ .../arm/GDS_SqlServerTable_NA_SqlAuth.json | 98 +++ .../modules/data_factory_datasets/main.tf | 312 +++++++ .../modules/data_factory_datasets/outputs.tf | 0 .../modules/data_factory_datasets/vars.tf | 78 ++ ...AzureBlobFS_Binary_AzureBlobFS_Binary.json | 118 +++ ...BlobFS_Binary_AzureBlobStorage_Binary.json | 118 +++ ...bFS_DelimitedText_AzureBlobFS_Parquet.json | 118 +++ ...elimitedText_AzureBlobStorage_Parquet.json | 118 +++ ...BlobFS_DelimitedText_AzureSqlTable_NA.json | 118 +++ ...lobFS_Excel_AzureBlobFS_DelimitedText.json | 118 +++ ...AzureBlobFS_Excel_AzureBlobFS_Parquet.json | 118 +++ ..._Excel_AzureBlobStorage_DelimitedText.json | 118 +++ ...BlobFS_Excel_AzureBlobStorage_Parquet.json | 118 +++ ...-1_AzureBlobFS_Excel_AzureSqlTable_NA.json | 118 +++ ...L-1_AzureBlobFS_Json_AzureSqlTable_NA.json | 118 +++ ...bFS_Parquet_AzureBlobFS_DelimitedText.json | 118 +++ ...arquet_AzureBlobStorage_DelimitedText.json | 118 +++ ..._AzureBlobFS_Parquet_AzureSqlTable_NA.json | 118 +++ ...BlobStorage_Binary_AzureBlobFS_Binary.json | 118 +++ ...torage_Binary_AzureBlobStorage_Binary.json | 118 +++ ...age_DelimitedText_AzureBlobFS_Parquet.json | 118 +++ ...elimitedText_AzureBlobStorage_Parquet.json | 118 +++ ...torage_DelimitedText_AzureSqlTable_NA.json | 118 +++ ...orage_Excel_AzureBlobFS_DelimitedText.json | 118 +++ ...BlobStorage_Excel_AzureBlobFS_Parquet.json | 118 +++ ..._Excel_AzureBlobStorage_DelimitedText.json | 118 +++ ...torage_Excel_AzureBlobStorage_Parquet.json | 118 +++ ...ureBlobStorage_Excel_AzureSqlTable_NA.json | 118 +++ ...zureBlobStorage_Json_AzureSqlTable_NA.json | 118 +++ 
...age_Parquet_AzureBlobFS_DelimitedText.json | 118 +++ ...arquet_AzureBlobStorage_DelimitedText.json | 118 +++ ...eBlobStorage_Parquet_AzureSqlTable_NA.json | 118 +++ ..._AzureSqlTable_NA_AzureBlobFS_Parquet.json | 118 +++ ...eSqlTable_NA_AzureBlobStorage_Parquet.json | 118 +++ ...SqlServerTable_NA_AzureBlobFS_Parquet.json | 118 +++ ...rverTable_NA_AzureBlobStorage_Parquet.json | 118 +++ ...able_NA_AzureBlobFS_Parquet_Full_Load.json | 378 +++++++++ ...able_NA_AzureBlobFS_Parquet_Watermark.json | 381 +++++++++ ...NA_AzureBlobStorage_Parquet_Full_Load.json | 378 +++++++++ ...NA_AzureBlobStorage_Parquet_Watermark.json | 381 +++++++++ .../GPL0_AzureSqlTable_NA_Create_Table.json | 285 +++++++ .../arm/GPL0_AzureSqlTable_NA_Post_Copy.json | 582 +++++++++++++ ...able_NA_AzureBlobFS_Parquet_Full_Load.json | 390 +++++++++ ...able_NA_AzureBlobFS_Parquet_Watermark.json | 393 +++++++++ ...NA_AzureBlobStorage_Parquet_Full_Load.json | 390 +++++++++ ...NA_AzureBlobStorage_Parquet_Watermark.json | 393 +++++++++ ...A_AzureBlobFS_Parquet_Full_Load_Chunk.json | 143 ++++ ...A_AzureBlobFS_Parquet_Watermark_Chunk.json | 178 ++++ ...reBlobStorage_Parquet_Full_Load_Chunk.json | 143 ++++ ...reBlobStorage_Parquet_Watermark_Chunk.json | 178 ++++ ...A_AzureBlobFS_Parquet_Full_Load_Chunk.json | 143 ++++ ...A_AzureBlobFS_Parquet_Watermark_Chunk.json | 178 ++++ ...reBlobStorage_Parquet_Full_Load_Chunk.json | 143 ++++ ...reBlobStorage_Parquet_Watermark_Chunk.json | 178 ++++ ...AzureBlobFS_Binary_AzureBlobFS_Binary.json | 118 +++ ...bFS_Binary_AzureBlobFS_Binary_Primary.json | 223 +++++ ...BlobFS_Binary_AzureBlobStorage_Binary.json | 118 +++ ...inary_AzureBlobStorage_Binary_Primary.json | 223 +++++ ...bFS_DelimitedText_AzureBlobFS_Parquet.json | 118 +++ ...mitedText_AzureBlobFS_Parquet_Primary.json | 233 ++++++ ...elimitedText_AzureBlobStorage_Parquet.json | 118 +++ ...Text_AzureBlobStorage_Parquet_Primary.json | 233 ++++++ ...BlobFS_DelimitedText_AzureSqlTable_NA.json | 118 +++ 
...elimitedText_AzureSqlTable_NA_Primary.json | 338 ++++++++ ...lobFS_Excel_AzureBlobFS_DelimitedText.json | 118 +++ ...cel_AzureBlobFS_DelimitedText_Primary.json | 226 ++++++ ...AzureBlobFS_Excel_AzureBlobFS_Parquet.json | 118 +++ ...bFS_Excel_AzureBlobFS_Parquet_Primary.json | 217 +++++ ..._Excel_AzureBlobStorage_DelimitedText.json | 118 +++ ...zureBlobStorage_DelimitedText_Primary.json | 226 ++++++ ...BlobFS_Excel_AzureBlobStorage_Parquet.json | 118 +++ ...xcel_AzureBlobStorage_Parquet_Primary.json | 217 +++++ ...PL_AzureBlobFS_Excel_AzureSqlTable_NA.json | 118 +++ ...BlobFS_Excel_AzureSqlTable_NA_Primary.json | 322 ++++++++ ...GPL_AzureBlobFS_Json_AzureSqlTable_NA.json | 118 +++ ...eBlobFS_Json_AzureSqlTable_NA_Primary.json | 316 ++++++++ ...bFS_Parquet_AzureBlobFS_DelimitedText.json | 118 +++ ...uet_AzureBlobFS_DelimitedText_Primary.json | 218 +++++ ...arquet_AzureBlobStorage_DelimitedText.json | 118 +++ ...zureBlobStorage_DelimitedText_Primary.json | 218 +++++ ..._AzureBlobFS_Parquet_AzureSqlTable_NA.json | 118 +++ ...obFS_Parquet_AzureSqlTable_NA_Primary.json | 326 ++++++++ ...BlobStorage_Binary_AzureBlobFS_Binary.json | 118 +++ ...age_Binary_AzureBlobFS_Binary_Primary.json | 223 +++++ ...torage_Binary_AzureBlobStorage_Binary.json | 118 +++ ...inary_AzureBlobStorage_Binary_Primary.json | 223 +++++ ...age_DelimitedText_AzureBlobFS_Parquet.json | 118 +++ ...mitedText_AzureBlobFS_Parquet_Primary.json | 233 ++++++ ...elimitedText_AzureBlobStorage_Parquet.json | 118 +++ ...Text_AzureBlobStorage_Parquet_Primary.json | 233 ++++++ ...torage_DelimitedText_AzureSqlTable_NA.json | 118 +++ ...elimitedText_AzureSqlTable_NA_Primary.json | 338 ++++++++ ...orage_Excel_AzureBlobFS_DelimitedText.json | 118 +++ ...cel_AzureBlobFS_DelimitedText_Primary.json | 226 ++++++ ...BlobStorage_Excel_AzureBlobFS_Parquet.json | 118 +++ ...age_Excel_AzureBlobFS_Parquet_Primary.json | 217 +++++ ..._Excel_AzureBlobStorage_DelimitedText.json | 118 +++ 
...zureBlobStorage_DelimitedText_Primary.json | 226 ++++++ ...torage_Excel_AzureBlobStorage_Parquet.json | 118 +++ ...xcel_AzureBlobStorage_Parquet_Primary.json | 217 +++++ ...ureBlobStorage_Excel_AzureSqlTable_NA.json | 118 +++ ...torage_Excel_AzureSqlTable_NA_Primary.json | 322 ++++++++ ...zureBlobStorage_Json_AzureSqlTable_NA.json | 118 +++ ...Storage_Json_AzureSqlTable_NA_Primary.json | 317 ++++++++ ...age_Parquet_AzureBlobFS_DelimitedText.json | 118 +++ ...uet_AzureBlobFS_DelimitedText_Primary.json | 218 +++++ ...arquet_AzureBlobStorage_DelimitedText.json | 118 +++ ...zureBlobStorage_DelimitedText_Primary.json | 218 +++++ ...eBlobStorage_Parquet_AzureSqlTable_NA.json | 118 +++ ...rage_Parquet_AzureSqlTable_NA_Primary.json | 326 ++++++++ ..._AzureSqlTable_NA_AzureBlobFS_Parquet.json | 118 +++ ...lTable_NA_AzureBlobFS_Parquet_Primary.json | 551 +++++++++++++ ...eSqlTable_NA_AzureBlobStorage_Parquet.json | 118 +++ ...e_NA_AzureBlobStorage_Parquet_Primary.json | 551 +++++++++++++ ...SqlServerTable_NA_AzureBlobFS_Parquet.json | 118 +++ ...rTable_NA_AzureBlobFS_Parquet_Primary.json | 599 ++++++++++++++ ...rverTable_NA_AzureBlobStorage_Parquet.json | 118 +++ ...e_NA_AzureBlobStorage_Parquet_Primary.json | 599 ++++++++++++++ .../data_factory_pipelines_azure/main.tf | 109 +++ .../data_factory_pipelines_azure/outputs.tf | 0 .../data_factory_pipelines_azure/vars.tf | 28 + .../arm/GPL_AzureFunction_Common.json | 121 +++ .../arm/SPL_AzureFunction.json | 121 +++ .../data_factory_pipelines_common/main.tf | 14 + .../data_factory_pipelines_common/outputs.tf | 0 .../data_factory_pipelines_common/vars.tf | 18 + ...AzureBlobFS_Binary_AzureBlobFS_Binary.json | 118 +++ ...BlobFS_Binary_AzureBlobStorage_Binary.json | 118 +++ ...bFS_DelimitedText_AzureBlobFS_Parquet.json | 118 +++ ...elimitedText_AzureBlobStorage_Parquet.json | 118 +++ ...BlobFS_DelimitedText_AzureSqlTable_NA.json | 118 +++ ...lobFS_Excel_AzureBlobFS_DelimitedText.json | 118 +++ 
...AzureBlobFS_Excel_AzureBlobFS_Parquet.json | 118 +++ ..._Excel_AzureBlobStorage_DelimitedText.json | 118 +++ ...BlobFS_Excel_AzureBlobStorage_Parquet.json | 118 +++ ...-1_AzureBlobFS_Excel_AzureSqlTable_NA.json | 118 +++ ...L-1_AzureBlobFS_Json_AzureSqlTable_NA.json | 118 +++ ...bFS_Parquet_AzureBlobFS_DelimitedText.json | 118 +++ ...arquet_AzureBlobStorage_DelimitedText.json | 118 +++ ..._AzureBlobFS_Parquet_AzureSqlTable_NA.json | 118 +++ ...BlobStorage_Binary_AzureBlobFS_Binary.json | 118 +++ ...torage_Binary_AzureBlobStorage_Binary.json | 118 +++ ...age_DelimitedText_AzureBlobFS_Parquet.json | 118 +++ ...elimitedText_AzureBlobStorage_Parquet.json | 118 +++ ...torage_DelimitedText_AzureSqlTable_NA.json | 118 +++ ...orage_Excel_AzureBlobFS_DelimitedText.json | 118 +++ ...BlobStorage_Excel_AzureBlobFS_Parquet.json | 118 +++ ..._Excel_AzureBlobStorage_DelimitedText.json | 118 +++ ...torage_Excel_AzureBlobStorage_Parquet.json | 118 +++ ...ureBlobStorage_Excel_AzureSqlTable_NA.json | 118 +++ ...zureBlobStorage_Json_AzureSqlTable_NA.json | 118 +++ ...age_Parquet_AzureBlobFS_DelimitedText.json | 118 +++ ...arquet_AzureBlobStorage_DelimitedText.json | 118 +++ ...eBlobStorage_Parquet_AzureSqlTable_NA.json | 118 +++ ..._AzureSqlTable_NA_AzureBlobFS_Parquet.json | 118 +++ ...eSqlTable_NA_AzureBlobStorage_Parquet.json | 118 +++ ...SqlServerTable_NA_AzureBlobFS_Parquet.json | 118 +++ ...rverTable_NA_AzureBlobStorage_Parquet.json | 118 +++ ...able_NA_AzureBlobFS_Parquet_Full_Load.json | 378 +++++++++ ...able_NA_AzureBlobFS_Parquet_Watermark.json | 381 +++++++++ ...NA_AzureBlobStorage_Parquet_Full_Load.json | 378 +++++++++ ...NA_AzureBlobStorage_Parquet_Watermark.json | 381 +++++++++ .../GPL0_AzureSqlTable_NA_Create_Table.json | 285 +++++++ .../arm/GPL0_AzureSqlTable_NA_Post_Copy.json | 582 +++++++++++++ ...able_NA_AzureBlobFS_Parquet_Full_Load.json | 390 +++++++++ ...able_NA_AzureBlobFS_Parquet_Watermark.json | 393 +++++++++ ...NA_AzureBlobStorage_Parquet_Full_Load.json | 
390 +++++++++ ...NA_AzureBlobStorage_Parquet_Watermark.json | 393 +++++++++ ...A_AzureBlobFS_Parquet_Full_Load_Chunk.json | 143 ++++ ...A_AzureBlobFS_Parquet_Watermark_Chunk.json | 178 ++++ ...reBlobStorage_Parquet_Full_Load_Chunk.json | 143 ++++ ...reBlobStorage_Parquet_Watermark_Chunk.json | 178 ++++ ...A_AzureBlobFS_Parquet_Full_Load_Chunk.json | 143 ++++ ...A_AzureBlobFS_Parquet_Watermark_Chunk.json | 178 ++++ ...reBlobStorage_Parquet_Full_Load_Chunk.json | 143 ++++ ...reBlobStorage_Parquet_Watermark_Chunk.json | 178 ++++ ...AzureBlobFS_Binary_AzureBlobFS_Binary.json | 118 +++ ...bFS_Binary_AzureBlobFS_Binary_Primary.json | 223 +++++ ...BlobFS_Binary_AzureBlobStorage_Binary.json | 118 +++ ...inary_AzureBlobStorage_Binary_Primary.json | 223 +++++ ...bFS_DelimitedText_AzureBlobFS_Parquet.json | 118 +++ ...mitedText_AzureBlobFS_Parquet_Primary.json | 233 ++++++ ...elimitedText_AzureBlobStorage_Parquet.json | 118 +++ ...Text_AzureBlobStorage_Parquet_Primary.json | 233 ++++++ ...BlobFS_DelimitedText_AzureSqlTable_NA.json | 118 +++ ...elimitedText_AzureSqlTable_NA_Primary.json | 338 ++++++++ ...lobFS_Excel_AzureBlobFS_DelimitedText.json | 118 +++ ...cel_AzureBlobFS_DelimitedText_Primary.json | 226 ++++++ ...AzureBlobFS_Excel_AzureBlobFS_Parquet.json | 118 +++ ...bFS_Excel_AzureBlobFS_Parquet_Primary.json | 217 +++++ ..._Excel_AzureBlobStorage_DelimitedText.json | 118 +++ ...zureBlobStorage_DelimitedText_Primary.json | 226 ++++++ ...BlobFS_Excel_AzureBlobStorage_Parquet.json | 118 +++ ...xcel_AzureBlobStorage_Parquet_Primary.json | 217 +++++ ...PL_AzureBlobFS_Excel_AzureSqlTable_NA.json | 118 +++ ...BlobFS_Excel_AzureSqlTable_NA_Primary.json | 322 ++++++++ ...GPL_AzureBlobFS_Json_AzureSqlTable_NA.json | 118 +++ ...eBlobFS_Json_AzureSqlTable_NA_Primary.json | 316 ++++++++ ...bFS_Parquet_AzureBlobFS_DelimitedText.json | 118 +++ ...uet_AzureBlobFS_DelimitedText_Primary.json | 218 +++++ ...arquet_AzureBlobStorage_DelimitedText.json | 118 +++ 
...zureBlobStorage_DelimitedText_Primary.json | 218 +++++ ..._AzureBlobFS_Parquet_AzureSqlTable_NA.json | 118 +++ ...obFS_Parquet_AzureSqlTable_NA_Primary.json | 326 ++++++++ ...BlobStorage_Binary_AzureBlobFS_Binary.json | 118 +++ ...age_Binary_AzureBlobFS_Binary_Primary.json | 223 +++++ ...torage_Binary_AzureBlobStorage_Binary.json | 118 +++ ...inary_AzureBlobStorage_Binary_Primary.json | 223 +++++ ...age_DelimitedText_AzureBlobFS_Parquet.json | 118 +++ ...mitedText_AzureBlobFS_Parquet_Primary.json | 233 ++++++ ...elimitedText_AzureBlobStorage_Parquet.json | 118 +++ ...Text_AzureBlobStorage_Parquet_Primary.json | 233 ++++++ ...torage_DelimitedText_AzureSqlTable_NA.json | 118 +++ ...elimitedText_AzureSqlTable_NA_Primary.json | 338 ++++++++ ...orage_Excel_AzureBlobFS_DelimitedText.json | 118 +++ ...cel_AzureBlobFS_DelimitedText_Primary.json | 226 ++++++ ...BlobStorage_Excel_AzureBlobFS_Parquet.json | 118 +++ ...age_Excel_AzureBlobFS_Parquet_Primary.json | 217 +++++ ..._Excel_AzureBlobStorage_DelimitedText.json | 118 +++ ...zureBlobStorage_DelimitedText_Primary.json | 226 ++++++ ...torage_Excel_AzureBlobStorage_Parquet.json | 118 +++ ...xcel_AzureBlobStorage_Parquet_Primary.json | 217 +++++ ...ureBlobStorage_Excel_AzureSqlTable_NA.json | 118 +++ ...torage_Excel_AzureSqlTable_NA_Primary.json | 322 ++++++++ ...zureBlobStorage_Json_AzureSqlTable_NA.json | 118 +++ ...Storage_Json_AzureSqlTable_NA_Primary.json | 317 ++++++++ ...age_Parquet_AzureBlobFS_DelimitedText.json | 118 +++ ...uet_AzureBlobFS_DelimitedText_Primary.json | 218 +++++ ...arquet_AzureBlobStorage_DelimitedText.json | 118 +++ ...zureBlobStorage_DelimitedText_Primary.json | 218 +++++ ...eBlobStorage_Parquet_AzureSqlTable_NA.json | 118 +++ ...rage_Parquet_AzureSqlTable_NA_Primary.json | 326 ++++++++ ..._AzureSqlTable_NA_AzureBlobFS_Parquet.json | 118 +++ ...lTable_NA_AzureBlobFS_Parquet_Primary.json | 551 +++++++++++++ ...eSqlTable_NA_AzureBlobStorage_Parquet.json | 118 +++ 
...e_NA_AzureBlobStorage_Parquet_Primary.json | 551 +++++++++++++ ...SqlServerTable_NA_AzureBlobFS_Parquet.json | 118 +++ ...rTable_NA_AzureBlobFS_Parquet_Primary.json | 599 ++++++++++++++ ...rverTable_NA_AzureBlobStorage_Parquet.json | 118 +++ ...e_NA_AzureBlobStorage_Parquet_Primary.json | 599 ++++++++++++++ .../data_factory_pipelines_selfhosted/main.tf | 81 ++ .../outputs.tf | 0 .../data_factory_pipelines_selfhosted/vars.tf | 28 + .../arm/privatelinks.json | 355 ++++++++ .../main.tf | 44 + .../outputs.tf | 0 .../vars.tf | 62 ++ .../terraform_layer2/nsg_app_service.tf | 53 ++ .../terraform_layer2/nsg_bastion.tf | 207 +++++ .../terraform_layer2/nsg_plink.tf | 53 ++ .../DeploymentV2/terraform_layer2/nsg_vms.tf | 51 ++ .../DeploymentV2/terraform_layer2/outputs.tf | 262 +++++- .../terraform_layer2/private_dns.tf | 176 ++++ .../DeploymentV2/terraform_layer2/purview.tf | 120 +++ .../DeploymentV2/terraform_layer2/readme.md | 101 +++ .../DeploymentV2/terraform_layer2/security.tf | 112 +++ .../terraform_layer2/storage_adls.tf | 168 ++++ .../terraform_layer2/storage_blob.tf | 133 +++ .../DeploymentV2/terraform_layer2/subnet.tf | 63 ++ .../DeploymentV2/terraform_layer2/synapse.tf | 455 +++++++++++ .../DeploymentV2/terraform_layer2/vars.tf | 763 +++++++++++++++++- .../terraform_layer2/vars/admz/terragrunt.hcl | 57 +- .../vars/local/terragrunt.hcl | 43 +- .../vars/production/terragrunt.hcl | 36 +- .../vars/staging/terragrunt.hcl | 38 + .../terraform_layer2/virtual_machines.tf | 312 +++++++ .../DeploymentV2/terraform_layer2/vnet.tf | 15 + .../DeploymentV2/terraform_layer3/.gitignore | 39 + .../terraform_layer3/.terraform.lock.hcl | 62 ++ .../terraform_layer3/function_app.tf | 23 + .../terraform_layer3/key_vault.tf | 9 + .../DeploymentV2/terraform_layer3/layer2.tf | 13 + .../DeploymentV2/terraform_layer3/locals.tf | 20 + .../DeploymentV2/terraform_layer3/main.tf | 51 ++ .../DeploymentV2/terraform_layer3/outputs.tf | 32 + .../DeploymentV2/terraform_layer3/readme.md | 11 + 
.../DeploymentV2/terraform_layer3/vars.tf | 121 +++ .../terraform_layer3/vars/admz/terragrunt.hcl | 30 + .../vars/local/terragrunt.hcl | 32 + .../vars/production/terragrunt.hcl | 33 + .../vars/staging/terragrunt.hcl | 51 ++ 326 files changed, 53265 insertions(+), 84 deletions(-) create mode 100644 solution/DeploymentV2/terraform/layer1.tf create mode 100644 solution/DeploymentV2/terraform_layer1/readme.md create mode 100644 solution/DeploymentV2/terraform_layer2/app_insights.tf create mode 100644 solution/DeploymentV2/terraform_layer2/app_service.tf create mode 100644 solution/DeploymentV2/terraform_layer2/app_service_plan.tf create mode 100644 solution/DeploymentV2/terraform_layer2/bastion.tf create mode 100644 solution/DeploymentV2/terraform_layer2/data_factory.tf create mode 100644 solution/DeploymentV2/terraform_layer2/data_factory_datasets.tf create mode 100644 solution/DeploymentV2/terraform_layer2/data_factory_integration_runtimes.tf create mode 100644 solution/DeploymentV2/terraform_layer2/data_factory_linked_services.tf create mode 100644 solution/DeploymentV2/terraform_layer2/data_factory_managed_private_endpoints.tf create mode 100644 solution/DeploymentV2/terraform_layer2/data_factory_pipelines.tf create mode 100644 solution/DeploymentV2/terraform_layer2/database.tf create mode 100644 solution/DeploymentV2/terraform_layer2/function_app.tf create mode 100644 solution/DeploymentV2/terraform_layer2/key_vault.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/README.md create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Excel.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Json.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Excel.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Json.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureSqlDWTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_FileServer_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_FileServer_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_OracleServerTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_Anonymous.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_Basic.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_OAuth2.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_ServicePrincipal.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_SqlServerTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_SqlServerTable_NA_SqlAuth.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/main.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/outputs.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/vars.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Binary_AzureBlobFS_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Binary_AzureBlobStorage_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Json_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobFS_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Json_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureSqlTable_NA_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureSqlTable_NA_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_SqlServerTable_NA_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_SqlServerTable_NA_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark.json create 
mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_Create_Table.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_Post_Copy.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/main.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/outputs.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/vars.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/arm/GPL_AzureFunction_Common.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/arm/SPL_AzureFunction.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/main.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/outputs.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/vars.tf create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Binary_AzureBlobFS_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Binary_AzureBlobStorage_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Json_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobFS_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureSqlTable_NA.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Json_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureSqlTable_NA_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureSqlTable_NA_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_SqlServerTable_NA_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_SqlServerTable_NA_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_Create_Table.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_Post_Copy.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary.json create mode 100644 
solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/main.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/outputs.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/vars.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/arm/privatelinks.json create mode 100644 solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/main.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/outputs.tf create mode 100644 solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/vars.tf create mode 100644 solution/DeploymentV2/terraform_layer2/nsg_app_service.tf create mode 100644 solution/DeploymentV2/terraform_layer2/nsg_bastion.tf create mode 100644 solution/DeploymentV2/terraform_layer2/nsg_plink.tf create mode 100644 
solution/DeploymentV2/terraform_layer2/nsg_vms.tf create mode 100644 solution/DeploymentV2/terraform_layer2/private_dns.tf create mode 100644 solution/DeploymentV2/terraform_layer2/purview.tf create mode 100644 solution/DeploymentV2/terraform_layer2/readme.md create mode 100644 solution/DeploymentV2/terraform_layer2/security.tf create mode 100644 solution/DeploymentV2/terraform_layer2/storage_adls.tf create mode 100644 solution/DeploymentV2/terraform_layer2/storage_blob.tf create mode 100644 solution/DeploymentV2/terraform_layer2/subnet.tf create mode 100644 solution/DeploymentV2/terraform_layer2/synapse.tf create mode 100644 solution/DeploymentV2/terraform_layer2/virtual_machines.tf create mode 100644 solution/DeploymentV2/terraform_layer2/vnet.tf create mode 100644 solution/DeploymentV2/terraform_layer3/.gitignore create mode 100644 solution/DeploymentV2/terraform_layer3/.terraform.lock.hcl create mode 100644 solution/DeploymentV2/terraform_layer3/function_app.tf create mode 100644 solution/DeploymentV2/terraform_layer3/key_vault.tf create mode 100644 solution/DeploymentV2/terraform_layer3/layer2.tf create mode 100644 solution/DeploymentV2/terraform_layer3/locals.tf create mode 100644 solution/DeploymentV2/terraform_layer3/main.tf create mode 100644 solution/DeploymentV2/terraform_layer3/outputs.tf create mode 100644 solution/DeploymentV2/terraform_layer3/readme.md create mode 100644 solution/DeploymentV2/terraform_layer3/vars.tf create mode 100644 solution/DeploymentV2/terraform_layer3/vars/admz/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer3/vars/local/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer3/vars/production/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 49ae83ce..bf982ebc 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -77,7 
+77,7 @@ $AddSpecificUserAsWebAppAdmin = $env:AdsGf_AddSpecificUserAsWebAppAdmin #------------------------------------------------------------------------------------------------------------ # Main Terraform #------------------------------------------------------------------------------------------------------------ -Invoke-Expression ./Deploy_1_Infra0.ps1 +#Invoke-Expression ./Deploy_1_Infra0.ps1 #------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/terraform/layer1.tf b/solution/DeploymentV2/terraform/layer1.tf new file mode 100644 index 00000000..aa9dd0c6 --- /dev/null +++ b/solution/DeploymentV2/terraform/layer1.tf @@ -0,0 +1,13 @@ +# Generated by Terragrunt. Sig: nIlQXj57tbuaRZEa + data "terraform_remote_state" "layer1" { + # The settings here should match the "backend" settings in the + # configuration that manages the network resources. + backend = "azurerm" + + config = { + container_name = "tstate" + key = "terraform_layer1.tfstate" + resource_group_name = "gft2" + storage_account_name = "gft2state" + } + } diff --git a/solution/DeploymentV2/terraform/main.tf b/solution/DeploymentV2/terraform/main.tf index d8219722..3dd48614 100644 --- a/solution/DeploymentV2/terraform/main.tf +++ b/solution/DeploymentV2/terraform/main.tf @@ -36,6 +36,7 @@ data "azurerm_client_config" "current" { module "naming" { source = "Azure/naming/azurerm" version = "0.1.1" + unique-seed = data.terraform_remote_state.layer1.outputs.naming_unique_seed prefix = [ var.prefix, var.environment_tag diff --git a/solution/DeploymentV2/terraform/outputs.tf b/solution/DeploymentV2/terraform/outputs.tf index 5d189df6..66cc64fc 100644 --- a/solution/DeploymentV2/terraform/outputs.tf +++ b/solution/DeploymentV2/terraform/outputs.tf @@ -238,3 +238,11 @@ output "sif_database_name" { value = var.sif_database_name } +/*Variables for Naming Module*/ +output "naming_unique_seed" { + value = 
data.terraform_remote_state.layer1.outputs.naming_unique_seed +} + +output "naming_unique_suffix" { + value = data.terraform_remote_state.layer1.outputs.naming_unique_suffix +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl index 448ac76e..44d68995 100644 --- a/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl @@ -1,3 +1,22 @@ +generate "layer1.tf" { + path = "layer1.tf" + if_exists = "overwrite_terragrunt" + contents = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true || var.is_onprem_datafactory_ir_registered == true)) + } + source = "./modules/data_factory_datasets" + resource_group_name = var.resource_group_name + data_factory_name = local.data_factory_name + is_azure = each.value.is_azure + integration_runtime_name = each.value.name + integration_runtime_short_name = each.value.short_name + azure_sql_linkedservice_name = "${local.linkedservice_generic_azuresql_prefix}${each.value.short_name}" + azure_synapse_linkedservice_name = "${local.linkedservice_generic_synapse_prefix}${each.value.short_name}" + data_lake_linkedservice_name = "${local.linkedservice_generic_adls_prefix}${each.value.short_name}" + blob_linkedservice_name = "${local.linkedservice_generic_blob_prefix}${each.value.short_name}" + mssql_linkedservice_name = "${local.linkedservice_generic_mssql_prefix}${each.value.short_name}" + mssql_sqlauth_linkedservice_name = "${local.linkedservice_generic_mssql_prefix}sqlauth_${each.value.short_name}" + fileserver_linkedservice_name = "${local.linkedservice_generic_file_prefix}${each.value.short_name}" + rest_anonymous_linkedservice_name = "${local.linkedservice_generic_rest_prefix}Anonymous_${each.value.short_name}" + rest_basic_linkedservice_name = "${local.linkedservice_generic_rest_prefix}Basic_${each.value.short_name}" + 
rest_serviceprincipal_linkedservice_name = "${local.linkedservice_generic_rest_prefix}ServicePrincipal_${each.value.short_name}" + rest_oauth2_linkedservice_name = "${local.linkedservice_generic_rest_prefix}OAuth2_${each.value.short_name}" + oracledb_linkedservice_name = "${local.linkedservice_generic_oracledb_prefix}${each.value.short_name}" + + + name_suffix = random_id.rg_deployment_unique.id + depends_on = [ + azurerm_data_factory_linked_custom_service.generic_kv, + azurerm_data_factory_linked_custom_service.data_lake, + azurerm_data_factory_linked_custom_service.blob, + azurerm_data_factory_linked_custom_service.mssqldatabase, + azurerm_data_factory_linked_custom_service.database, + azurerm_data_factory_linked_custom_service.file, + azurerm_data_factory_linked_custom_service.mssqldatabase_sqlauth, + azurerm_data_factory_linked_custom_service.rest_anonymous, + azurerm_data_factory_linked_custom_service.rest_basic, + azurerm_data_factory_linked_custom_service.rest_serviceprincipal, + azurerm_data_factory_linked_custom_service.rest_oauth2, + azurerm_data_factory_linked_custom_service.oracledb + + ] +} + diff --git a/solution/DeploymentV2/terraform_layer2/data_factory_integration_runtimes.tf b/solution/DeploymentV2/terraform_layer2/data_factory_integration_runtimes.tf new file mode 100644 index 00000000..e2ccec26 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/data_factory_integration_runtimes.tf @@ -0,0 +1,31 @@ +# Azure Integration runtime +resource "azurerm_data_factory_integration_runtime_azure" "azure_ir" { + for_each = { + for ir in local.integration_runtimes : + ir.short_name => ir + if ir.is_azure && var.deploy_data_factory == true + } + name = each.value.name + data_factory_id = azurerm_data_factory.data_factory[0].id + location = var.resource_location + #resource_group_name = var.resource_group_name + time_to_live_min = 10 + virtual_network_enabled = var.is_vnet_isolated && each.value.is_managed_vnet + depends_on = [ + 
azurerm_data_factory.data_factory + ] +} + +resource "azurerm_data_factory_integration_runtime_self_hosted" "self_hosted_ir" { + for_each = { + for ir in local.integration_runtimes : + ir.short_name => ir + if ir.is_azure == false && var.deploy_data_factory == true + } + name = each.value.name + data_factory_id = azurerm_data_factory.data_factory[0].id + #resource_group_name = var.resource_group_name + depends_on = [ + azurerm_data_factory.data_factory + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/data_factory_linked_services.tf b/solution/DeploymentV2/terraform_layer2/data_factory_linked_services.tf new file mode 100644 index 00000000..106313a7 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/data_factory_linked_services.tf @@ -0,0 +1,567 @@ +locals { + linkedservice_azure_function_name = "SLS_AzureFunctionApp" + linkedservice_keyvault_name = "SLS_AzureKeyVault" + linkedservice_generic_kv_prefix = "GLS_AzureKeyVault_" + linkedservice_generic_adls_prefix = "GLS_AzureBlobFS_" + linkedservice_generic_blob_prefix = "GLS_AzureBlobStorage_" + linkedservice_generic_azuresql_prefix = "GLS_AzureSqlDatabase_" + linkedservice_generic_synapse_prefix = "GLS_AzureSqlDW_" + linkedservice_generic_mssql_prefix = "GLS_SqlServerDatabase_" + linkedservice_generic_file_prefix = "GLS_FileServer_" + linkedservice_generic_rest_prefix = "GLS_RestService_Auth" + linkedservice_generic_oracledb_prefix = "GLS_OracleDatabase_SN_" + +} + +#Azure KeyVault - Non Generic +resource "azurerm_data_factory_linked_service_key_vault" "key_vault_default" { + count = var.deploy_data_factory ? 
1 : 0 + name = local.linkedservice_keyvault_name + description = "Default Key Vault (Non-Dynamic)" + #resource_group_name = var.resource_group_name + data_factory_id = azurerm_data_factory.data_factory[0].id + key_vault_id = azurerm_key_vault.app_vault.id +} + +#Azure Function - Non Generic +resource "azurerm_data_factory_linked_service_azure_function" "function_app" { + count = var.deploy_data_factory && var.deploy_function_app ? 1 : 0 + name = local.linkedservice_azure_function_name + #resource_group_name = var.resource_group_name + data_factory_id = azurerm_data_factory.data_factory[0].id + url = local.functionapp_url + key_vault_key { + linked_service_name = azurerm_data_factory_linked_service_key_vault.key_vault_default[0].name + secret_name = "AdsGfCoreFunctionAppKey" + } +} + + +#------------------------------------------------------------------------------------------------------ +# Generic Linked Services (1 per Integration Runtime) +#------------------------------------------------------------------------------------------------------ +resource "azurerm_data_factory_linked_custom_service" "generic_kv" { + for_each = { + for ir in local.integration_runtimes : + ir.short_name => ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_kv_prefix}${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "AzureKeyVault" + description = "Generic Key Vault" + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_adls_prefix}${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "AzureBlobFS" + description = "Generic Data Lake" + type_properties_json = < ir + if(var.deploy_data_factory == true) && 
((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_blob_prefix}${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "AzureBlobStorage" + description = "Generic Blob Storage" + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_azuresql_prefix}${each.value.short_name}" + description = "Generic Azure SQL Server" + type = "AzureSqlDatabase" + data_factory_id = azurerm_data_factory.data_factory[0].id + integration_runtime { + name = each.value.name + } + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_mssql_prefix}${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "SqlServer" + description = "Generic SqlServer" + integration_runtime { + name = each.value.name + } + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_mssql_prefix}sqlauth_${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "SqlServer" + description = "Generic SqlServer" + integration_runtime { + name = each.value.name + } + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_file_prefix}${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "FileServer" + description = "Generic File Server" + 
integration_runtime { + name = each.value.name + } + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_synapse_prefix}${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "AzureSqlDW" + description = "Generic Azure Synapse Connection" + integration_runtime { + name = each.value.name + } + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_rest_prefix}Anonymous_${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "RestService" + description = "Generic Anonymous Rest Connection" + integration_runtime { + name = each.value.name + } + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_rest_prefix}Basic_${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "RestService" + description = "Generic Basic Rest Connection" + integration_runtime { + name = each.value.name + } + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_rest_prefix}ServicePrincipal_${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "RestService" + description = "Generic Service Principal Rest Connection" + integration_runtime { + name = each.value.name + } + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && 
var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_rest_prefix}OAuth2_${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "RestService" + description = "Generic OAuth2 Rest Connection" + integration_runtime { + name = each.value.name + } + type_properties_json = < ir + if(var.deploy_data_factory == true) && ((ir.is_azure == true) || (ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true)) + } + name = "${local.linkedservice_generic_oracledb_prefix}${each.value.short_name}" + data_factory_id = azurerm_data_factory.data_factory[0].id + type = "Oracle" + description = "Generic Service Principal Oracle DB Connection using Service Name" + integration_runtime { + name = each.value.name + } + type_properties_json = < ir +# if ir.is_azure == true +# } +# resource_group_name = var.resource_group_name +# data_factory_name = local.data_factory_name +# shared_keyvault_uri = "https://${local.key_vault_name}.vault.azure.net/" +# integration_runtime_name = each.value.name +# integration_runtime_short_name = each.value.short_name +# name_suffix = random_id.rg_deployment_unique.id +# depends_on = [ +# module.data_factory_datasets, +# module.data_factory_pipelines_common +# ] +# } + +# module "data_factory_pipelines_selfhosted" { +# source = "./modules/data_factory_pipelines_selfhosted" +# for_each = { +# for ir in local.integration_runtimes : +# ir.short_name => ir +# if(ir.is_azure == false && var.is_onprem_datafactory_ir_registered == true) +# } +# resource_group_name = var.resource_group_name +# data_factory_name = local.data_factory_name +# shared_keyvault_uri = "https://${local.key_vault_name}.vault.azure.net/" +# integration_runtime_name = each.value.name +# integration_runtime_short_name = each.value.short_name +# name_suffix = random_id.rg_deployment_unique.id +# depends_on = [ +# module.data_factory_datasets, +# module.data_factory_pipelines_common +# ] +# } \ No newline 
at end of file diff --git a/solution/DeploymentV2/terraform_layer2/database.tf b/solution/DeploymentV2/terraform_layer2/database.tf new file mode 100644 index 00000000..62d9e460 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/database.tf @@ -0,0 +1,111 @@ + + +resource "random_password" "database" { + length = 32 + min_numeric = 1 + min_upper = 1 + min_lower = 1 + min_special = 1 + special = true + lower = true + number = true + upper = true +} + +# Database Server +resource "azurerm_mssql_server" "sqlserver" { + count = var.deploy_sql_server ? 1 : 0 + name = local.sql_server_name + resource_group_name = var.resource_group_name + location = var.resource_location + version = "12.0" + administrator_login = var.sql_admin_username + administrator_login_password = random_password.database.result + public_network_access_enabled = var.is_vnet_isolated == false || var.delay_private_access + minimum_tls_version = "1.2" + + azuread_administrator { + login_username = "sqladmin" + object_id = data.azurerm_client_config.current.object_id + } + identity { + type = "SystemAssigned" + } + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } + +} + +resource "azurerm_mssql_database" "web_db" { + count = var.deploy_sql_server && var.deploy_metadata_database ? 1 : 0 + name = local.metadata_database_name + server_id = azurerm_mssql_server.sqlserver[0].id + sku_name = "S0" + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} +resource "azurerm_mssql_database" "sample_db" { + count = var.deploy_sql_server ? 1 : 0 + name = local.sample_database_name + server_id = azurerm_mssql_server.sqlserver[0].id + sku_name = "S0" + sample_name = "AdventureWorksLT" + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_mssql_database" "staging_db" { + count = var.deploy_sql_server ? 
1 : 0 + name = local.staging_database_name + server_id = azurerm_mssql_server.sqlserver[0].id + sku_name = "S0" + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_private_endpoint" "db_private_endpoint_with_dns" { + count = var.is_vnet_isolated ? 1 : 0 + name = "${local.sql_server_name}-plink" + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.sql_server_name}-plink-conn" + private_connection_resource_id = azurerm_mssql_server.sqlserver[0].id + is_manual_connection = false + subresource_names = ["sqlServer"] + } + + private_dns_zone_group { + name = "privatednszonegroup" + private_dns_zone_ids = [local.private_dns_zone_db_id] + } + + depends_on = [ + azurerm_mssql_server.sqlserver[0] + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} diff --git a/solution/DeploymentV2/terraform_layer2/function_app.tf b/solution/DeploymentV2/terraform_layer2/function_app.tf new file mode 100644 index 00000000..14c68f9e --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/function_app.tf @@ -0,0 +1,127 @@ +resource "azurerm_function_app" "function_app" { + name = local.functionapp_name + count = var.deploy_function_app && var.deploy_app_service_plan ? 1 : 0 + location = var.resource_location + resource_group_name = var.resource_group_name + app_service_plan_id = azurerm_app_service_plan.app_service_plan[0].id + storage_account_name = azurerm_storage_account.storage_acccount_security_logs.name + storage_account_access_key = azurerm_storage_account.storage_acccount_security_logs.primary_access_key + version = "~4" + + https_only = true + + site_config { + always_on = true + dotnet_framework_version = "v6.0" + ftps_state = "Disabled" + vnet_route_all_enabled = var.is_vnet_isolated + dynamic "ip_restriction" { + for_each = var.is_vnet_isolated ? 
[1] : [] + content { + priority = 100 + name = "Allow Private Link Subnet" + action = "Allow" + virtual_network_subnet_id = local.plink_subnet_id + } + } + dynamic "ip_restriction" { + for_each = var.is_vnet_isolated ? [1] : [] + content { + priority = 110 + name = "Allow App Service Subnet" + action = "Allow" + virtual_network_subnet_id = local.app_service_subnet_id + } + } + dynamic "ip_restriction" { + for_each = var.is_vnet_isolated ? [1] : [] + content { + priority = 120 + name = "Allow Azure Service Tag" + action = "Allow" + service_tag = "AzureCloud" + } + } + dynamic "ip_restriction" { + for_each = var.is_vnet_isolated ? [1] : [] + content { + priority = 130 + name = "Allow Data Factory Service Tag" + action = "Allow" + service_tag = "DataFactory" + } + } + } + + app_settings = { + + WEBSITE_RUN_FROM_PACKAGE = 0 + + FUNCTIONS_WORKER_RUNTIME = "dotnet" + FUNCTIONS_EXTENSION_VERSION = "~4" + AzureWebJobsStorage = azurerm_storage_account.storage_acccount_security_logs.primary_connection_string + APPINSIGHTS_INSTRUMENTATIONKEY = azurerm_application_insights.app_insights[0].instrumentation_key + ApplicationOptions__UseMSI = true + ApplicationOptions__ServiceConnections__AdsGoFastTaskMetaDataDatabaseServer = var.deploy_metadata_database ? "${azurerm_mssql_server.sqlserver[0].name}.database.windows.net" : null + ApplicationOptions__ServiceConnections__AdsGoFastTaskMetaDataDatabaseName = var.deploy_metadata_database ? 
azurerm_mssql_database.web_db[0].name : null + ApplicationOptions__ServiceConnections__CoreFunctionsURL = local.functionapp_url + ApplicationOptions__ServiceConnections__AppInsightsWorkspaceId = azurerm_application_insights.app_insights[0].app_id + + AzureAdAzureServicesViaAppReg__Domain = var.domain + AzureAdAzureServicesViaAppReg__TenantId = var.tenant_id + AzureAdAzureServicesViaAppReg__Audience = "api://${local.functionapp_name}" + AzureAdAzureServicesViaAppReg__ClientSecret = "@Microsoft.KeyVault(VaultName=${azurerm_key_vault.app_vault.name};SecretName=AzureFunctionClientSecret)" + AzureAdAzureServicesViaAppReg__ClientId = data.terraform_remote_state.layer1.outputs.aad_funcreg_id + + #Setting to null as we are using MSI + AzureAdAzureServicesDirect__ClientId = null + AzureAdAzureServicesDirect__ClientId = null + } + identity { + type = "SystemAssigned" + } + tags = local.tags + lifecycle { + ignore_changes = [ + tags, + app_settings["WEBSITE_RUN_FROM_PACKAGE"], + app_settings["SCM_DO_BUILD_DURING_DEPLOYMENT"] + ] + } + depends_on = [ + azurerm_private_endpoint.storage_private_endpoint_with_dns + ] +} + +resource "azurerm_app_service_virtual_network_swift_connection" "vnet_integration_func" { + count = var.is_vnet_isolated && var.deploy_function_app ? 1 : 0 + app_service_id = azurerm_function_app.function_app[0].id + subnet_id = local.app_service_subnet_id +} + +# Diagnostic logs-------------------------------------------------------------------------- +resource "azurerm_monitor_diagnostic_setting" "function_diagnostic_logs" { + count = var.deploy_function_app ? 
1 : 0 + name = "diagnosticlogs" + # ignore_changes is here given the bug https://github.com/terraform-providers/terraform-provider-azurerm/issues/10388 + lifecycle { + ignore_changes = [log, metric] + } + target_resource_id = azurerm_function_app.function_app[0].id + log_analytics_workspace_id = local.log_analytics_resource_id + + log { + category = "FunctionAppLogs" + enabled = true + retention_policy { + days = 0 + enabled = true + } + } + metric { + category = "AllMetrics" + enabled = false + } +} + + diff --git a/solution/DeploymentV2/terraform_layer2/key_vault.tf b/solution/DeploymentV2/terraform_layer2/key_vault.tf new file mode 100644 index 00000000..5a07b548 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/key_vault.tf @@ -0,0 +1,281 @@ +resource "azurerm_key_vault" "app_vault" { + name = local.key_vault_name + location = var.resource_location + resource_group_name = var.resource_group_name + enabled_for_disk_encryption = true + soft_delete_retention_days = 7 + purge_protection_enabled = false + tenant_id = var.tenant_id + sku_name = "standard" + + network_acls { + default_action = "Deny" + bypass = "AzureServices" + ip_rules = [var.ip_address] // This is required to allow us to set the secret values + } + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +// Grant secret and key access to the current app to store the secret values -------------------------- +// Allows the deployment service principal to compare / check state later +resource "azurerm_key_vault_access_policy" "user_access" { + count = (var.cicd_sp_id == data.azurerm_client_config.current.object_id ? 
0 : 1) + key_vault_id = azurerm_key_vault.app_vault.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "Delete", "List", "Get", "Create", "Update", "Purge" + ] + + secret_permissions = [ + "Delete", "List", "Get", "Set", "Purge" + ] + depends_on = [ + azurerm_key_vault.app_vault, + ] +} + +resource "azurerm_key_vault_access_policy" "cicd_access" { + count = (var.cicd_sp_id == "" ? 0 : 1) + key_vault_id = azurerm_key_vault.app_vault.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = (var.cicd_sp_id == data.azurerm_client_config.current.object_id ? var.cicd_sp_id : data.azurerm_client_config.current.object_id) + + key_permissions = [ + "Delete", "List", "Get", "Create", "Update", "Purge" + ] + + secret_permissions = [ + "Delete", "List", "Get", "Set", "Purge" + ] + depends_on = [ + azurerm_key_vault.app_vault, + ] +} + +resource "azurerm_key_vault_access_policy" "cicd_access_layers1and3" { + count = (var.deployment_principal_layers1and3 == "" ? 0 : 1) + key_vault_id = azurerm_key_vault.app_vault.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = var.deployment_principal_layers1and3 + + key_permissions = [ + "Delete", "List", "Get", "Create", "Update", "Purge" + ] + + secret_permissions = [ + "Delete", "List", "Get", "Set", "Purge" + ] + depends_on = [ + azurerm_key_vault.app_vault, + ] +} + +resource "time_sleep" "cicd_access" { + depends_on = [azurerm_key_vault_access_policy.cicd_access, azurerm_key_vault_access_policy.user_access] + create_duration = "10s" +} + +// Allows the data factory to retrieve the azure function host key +resource "azurerm_key_vault_access_policy" "adf_access" { + count = var.deploy_data_factory ? 
1 : 0 + key_vault_id = azurerm_key_vault.app_vault.id + tenant_id = var.tenant_id + object_id = azurerm_data_factory.data_factory[0].identity[0].principal_id + + key_permissions = [ + "Get", "List" + ] + + secret_permissions = [ + "List", "Get" + ] + depends_on = [ + azurerm_key_vault.app_vault, + ] +} + +// Allows purview to retrieve the IR service principal password +resource "azurerm_key_vault_access_policy" "purview_access" { + count = var.deploy_purview ? 1 : 0 + key_vault_id = azurerm_key_vault.app_vault.id + tenant_id = var.tenant_id + object_id = azurerm_purview_account.purview[0].identity[0].principal_id + + key_permissions = [ + "Get", "List" + ] + + secret_permissions = [ + "List", "Get" + ] + depends_on = [ + azurerm_key_vault.app_vault, + ] +} + +// Allows the Azure function to retrieve the Function App - AAD App Reg - Client Secret +resource "azurerm_key_vault_access_policy" "function_app" { + count = var.deploy_function_app ? 1 : 0 + key_vault_id = azurerm_key_vault.app_vault.id + tenant_id = var.tenant_id + object_id = azurerm_function_app.function_app[0].identity[0].principal_id + + key_permissions = [ + "Get", "List" + ] + + secret_permissions = [ + "List", "Get" + ] + depends_on = [ + azurerm_key_vault.app_vault, + ] +} + +// Allows the synapse workspace to retrieve the azure function host key +resource "azurerm_key_vault_access_policy" "synapse_access" { + count = var.deploy_synapse ? 1 : 0 + key_vault_id = azurerm_key_vault.app_vault.id + tenant_id = var.tenant_id + object_id = azurerm_synapse_workspace.synapse[0].identity[0].principal_id + + key_permissions = [ + "Get", "List" + ] + + secret_permissions = [ + "List", "Get" + ] + depends_on = [ + azurerm_key_vault.app_vault, + ] +} + + +// private endpoints -------------------------- +resource "azurerm_private_endpoint" "app_vault_private_endpoint_with_dns" { + count = var.is_vnet_isolated ? 
1 : 0 + name = "${local.key_vault_name}-plink" + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.key_vault_name}-plink-conn" + private_connection_resource_id = azurerm_key_vault.app_vault.id + is_manual_connection = false + subresource_names = ["vault"] + } + + private_dns_zone_group { + name = "privatednszonegroup" + private_dns_zone_ids = [local.private_dns_zone_kv_id] + } + + depends_on = [ + azurerm_key_vault.app_vault + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +// Diagnostic logs-------------------------------------------------------------------------- +resource "azurerm_monitor_diagnostic_setting" "app_vault_diagnostic_logs" { + name = "diagnosticlogs" + + target_resource_id = azurerm_key_vault.app_vault.id + log_analytics_workspace_id = local.log_analytics_resource_id + # ignore_changes is here given the bug https://github.com/terraform-providers/terraform-provider-azurerm/issues/10388 + lifecycle { + ignore_changes = [log, metric] + } + log { + category = "AuditEvent" + enabled = true + retention_policy { + days = 0 + enabled = true + } + } + log { + category = "AzurePolicyEvaluationDetails" + enabled = true + retention_policy { + days = 0 + enabled = true + } + } + metric { + category = "AllMetrics" + enabled = true + retention_policy { + days = 0 + enabled = true + } + } +} + + + +// Actual secrets ---------------------------------------------------------------------- +data "azurerm_function_app_host_keys" "function_app_host_key" { + count = var.deploy_function_app ? 1 : 0 + name = azurerm_function_app.function_app[0].name + resource_group_name = var.resource_group_name + depends_on = [ + time_sleep.cicd_access, + azurerm_app_service_virtual_network_swift_connection.vnet_integration_func + ] +} + + +resource "azurerm_key_vault_secret" "function_app_key" { + count = var.deploy_function_app ? 
1 : 0 + name = "AdsGfCoreFunctionAppKey" + value = data.azurerm_function_app_host_keys.function_app_host_key[0].default_function_key + key_vault_id = azurerm_key_vault.app_vault.id + depends_on = [ + time_sleep.cicd_access, + azurerm_app_service_virtual_network_swift_connection.vnet_integration_func + ] +} + +resource "azurerm_key_vault_secret" "purview_ir_sp_password" { + count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 + name = "AzurePurviewIr" + value = azuread_application_password.purview_ir[0].value + key_vault_id = azurerm_key_vault.app_vault.id + depends_on = [ + time_sleep.cicd_access, + ] +} + +resource "azurerm_key_vault_secret" "selfhostedsql_password" { + count = var.deploy_selfhostedsql ? 1 : 0 + name = "selfhostedsqlpw" + value = random_password.selfhostedsql[0].result + key_vault_id = azurerm_key_vault.app_vault.id + depends_on = [ + time_sleep.cicd_access, + ] + lifecycle { + ignore_changes = [ + value + ] + } +} + + diff --git a/solution/DeploymentV2/terraform_layer2/layer1.tf b/solution/DeploymentV2/terraform_layer2/layer1.tf index d315121b..aa9dd0c6 100644 --- a/solution/DeploymentV2/terraform_layer2/layer1.tf +++ b/solution/DeploymentV2/terraform_layer2/layer1.tf @@ -1,12 +1,13 @@ -data "terraform_remote_state" "layer1" { - # The settings here should match the "backend" settings in the - # configuration that manages the network resources. - backend = "azurerm" - - config = { - container_name = "tstate" - key = "terraform_layer1.tfstate" - resource_group_name = "gft2" - storage_account_name = "gft2state" - } -} \ No newline at end of file +# Generated by Terragrunt. Sig: nIlQXj57tbuaRZEa + data "terraform_remote_state" "layer1" { + # The settings here should match the "backend" settings in the + # configuration that manages the network resources. 
+ backend = "azurerm" + + config = { + container_name = "tstate" + key = "terraform_layer1.tfstate" + resource_group_name = "gft2" + storage_account_name = "gft2state" + } + } diff --git a/solution/DeploymentV2/terraform_layer2/locals.tf b/solution/DeploymentV2/terraform_layer2/locals.tf index 2de29d5d..e79209e4 100644 --- a/solution/DeploymentV2/terraform_layer2/locals.tf +++ b/solution/DeploymentV2/terraform_layer2/locals.tf @@ -1,11 +1,50 @@ locals { + data_factory_name = (var.data_factory_name != "" ? var.data_factory_name : module.naming.data_factory.name_unique) + key_vault_name = (var.key_vault_name != "" ? var.key_vault_name : module.naming.key_vault.name_unique) + app_insights_name = (var.app_insights_name != "" ? var.app_insights_name : module.naming.application_insights.name_unique) + app_service_plan_name = (var.app_service_plan_name != "" ? var.app_service_plan_name : module.naming.app_service_plan.name_unique) + sql_server_name = (var.sql_server_name != "" ? var.sql_server_name : module.naming.sql_server.name_unique) webapp_name = (var.webapp_name != "" ? var.webapp_name : module.naming.app_service.name_unique) webapp_url = "https://${local.webapp_name}.azurewebsites.net" functionapp_name = (var.functionapp_name != "" ? var.functionapp_name : module.naming.function_app.name_unique) functionapp_url = "https://${local.functionapp_name}.azurewebsites.net" aad_webapp_name = (var.aad_webapp_name != "" ? var.aad_webapp_name : "ADS GoFast Web Portal (${var.environment_tag})") aad_functionapp_name = (var.aad_functionapp_name != "" ? var.aad_functionapp_name : "ADS GoFast Orchestration App (${var.environment_tag})") - + vnet_name = (var.vnet_name != "" ? var.vnet_name : module.naming.virtual_network.name) + plink_subnet_name = (var.plink_subnet_name != "" ? var.plink_subnet_name : "${module.naming.subnet.name}-plink") + app_service_subnet_name = (var.app_service_subnet_name != "" ? 
var.plink_subnet_name : "${module.naming.subnet.name}-appservice") + vm_subnet_name = (var.vm_subnet_name != "" ? var.vm_subnet_name : "${module.naming.subnet.name}-vm") + logs_storage_account_name = (var.logs_storage_account_name != "" ? var.logs_storage_account_name : "${module.naming.storage_account.name_unique}log") + app_service_nsg_name = (var.app_service_nsg_name != "" ? var.app_service_nsg_name : "${module.naming.network_security_group.name}-appservice") + plink_nsg_name = (var.plink_nsg_name != "" ? var.plink_nsg_name : "${module.naming.network_security_group.name_unique}-plink") + bastion_nsg_name = (var.bastion_nsg_name != "" ? var.bastion_nsg_name : "${module.naming.network_security_group.name_unique}-bastion") + vm_nsg_name = (var.vm_nsg_name != "" ? var.vm_nsg_name : "${module.naming.network_security_group.name_unique}-vm") + log_analytics_workspace_name = (var.log_analytics_workspace_name != "" ? var.log_analytics_workspace_name : module.naming.log_analytics_workspace.name_unique) + metadata_database_name = "MetadataDb" + sample_database_name = "Samples" + staging_database_name = "Staging" + adls_storage_account_name = (var.adls_storage_account_name != "" ? var.adls_storage_account_name : "${module.naming.data_lake_store.name_unique}adsl") + blob_storage_account_name = (var.blob_storage_account_name != "" ? var.blob_storage_account_name : "${module.naming.data_lake_store.name_unique}blob") + bastion_name = (var.bastion_name != "" ? var.bastion_name : module.naming.bastion_host.name_unique) + bastion_ip_name = (var.bastion_ip_name != "" ? var.bastion_ip_name : module.naming.public_ip.name_unique) + purview_name = (var.purview_name != "" ? var.purview_name : "${var.prefix}${var.environment_tag}pur${var.app_name}${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_account_plink = (var.purview_name != "" ? 
var.purview_name : "${var.prefix}-${var.environment_tag}-pura-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_portal_plink = (var.purview_name != "" ? var.purview_name : "${var.prefix}-${var.environment_tag}-purp-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_resource_group_name = "managed-${module.naming.resource_group.name_unique}-purview" + purview_ir_app_reg_name = (var.purview_ir_app_reg_name != "" ? var.purview_ir_app_reg_name : "ADS GoFast Purview Integration Runtime (${var.environment_tag})") + jumphost_vm_name = module.naming.virtual_machine.name + jumphost_nic_name = "${module.naming.virtual_machine.name}-jumphost_nic" + jumphost_password = ((var.is_vnet_isolated && var.jumphost_password == null) ? "" : var.jumphost_password) + synapse_data_lake_name = (var.synapse_data_lake_name != "" ? var.synapse_data_lake_name : module.naming.data_lake_store.name_unique) + synapse_workspace_name = (var.synapse_workspace_name != "" ? var.synapse_workspace_name : "${var.prefix}${var.environment_tag}synw${var.app_name}${element(split("-", module.naming.data_factory.name_unique), length(split("-", module.naming.data_factory.name_unique)) - 1)}") + synapse_dwpool_name = (var.synapse_dwpool_name != "" ? var.synapse_dwpool_name : "${var.prefix}${var.environment_tag}syndp${var.app_name}") + synapse_sppool_name = (var.synapse_sppool_name != "" ? var.synapse_sppool_name : "${var.prefix}${var.environment_tag}synsp${var.app_name}") + synapse_resource_group_name = "managed-${module.naming.resource_group.name_unique}-synapse" + synapse_sql_password = ((var.deploy_synapse && var.synapse_sql_password == null) ? 
"" : var.synapse_sql_password) + selfhostedsqlvm_name = replace(module.naming.virtual_machine.name,"-vm-ads","-vm-sql") + h2o-ai_name = replace(module.naming.virtual_machine.name,"-vm-ads","-vm-h2o") + custom_vm_name = replace(module.naming.virtual_machine.name,"-vm-ads","-vm-custom") + tags = { Environment = var.environment_tag @@ -15,6 +54,67 @@ locals { CreatedDate = timestamp() } + integration_runtimes = [ + { + name = "Azure-Integration-Runtime" + short_name = "Azure" + is_azure = true + is_managed_vnet = true + valid_source_systems = ["*"] + valid_pipeline_patterns = [ + { + Folder = "*" + SourceFormat = "*" + SourceType = "*" + TargetFormat = "*" + TargetType = "*" + TaskTypeId = "*" + } + ] + }, + { + name = "Onprem-Integration-Runtime" + short_name = "OnPrem" + is_azure = false + is_managed_vnet = false + valid_source_systems = ["-14", "-15", "-9", "-3", "-4"] + valid_pipeline_patterns = [ + { + Folder = "Azure-Storage-to-Azure-Storage" + SourceFormat = "*" + SourceType = "*" + TargetFormat = "*" + TargetType = "*" + TaskTypeId = "*" + }, + { + Folder = "Execute-SQL-Statement" + SourceFormat = "*" + SourceType = "*" + TargetFormat = "*" + TargetType = "*" + TaskTypeId = "*" + }, + { + Folder = "SQL-Database-to-Azure-Storage" + SourceFormat = "*" + SourceType = "*" + TargetFormat = "*" + TargetType = "*" + TaskTypeId = "*" + }, + { + Folder = "SQL-Database-to-Azure-Storage-CDC" + SourceFormat = "*" + SourceType = "*" + TargetFormat = "*" + TargetType = "*" + TaskTypeId = "*" + } + + ] + } + ] } diff --git a/solution/DeploymentV2/terraform_layer2/main.tf b/solution/DeploymentV2/terraform_layer2/main.tf index e8db32f1..3dd48614 100644 --- a/solution/DeploymentV2/terraform_layer2/main.tf +++ b/solution/DeploymentV2/terraform_layer2/main.tf @@ -36,6 +36,7 @@ data "azurerm_client_config" "current" { module "naming" { source = "Azure/naming/azurerm" version = "0.1.1" + unique-seed = data.terraform_remote_state.layer1.outputs.naming_unique_seed prefix = [ 
var.prefix, var.environment_tag @@ -45,6 +46,7 @@ module "naming" { ] } + resource "random_id" "rg_deployment_unique" { byte_length = 4 } diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/README.md b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/README.md new file mode 100644 index 00000000..6107675c --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/README.md @@ -0,0 +1,62 @@ +# Purpose +The purpose of this module is to provide a way to create all of the appropriate datasets for each of the linked service types for a singular integration runtime. + +This module is parameterised to allow it to be run multiple times, once for each of the integration runtimes that is confiugred. +# For Datasets we support the sources + - Binary file + - Delimited Text + - Json + - Parquet + - SQL Table + + +# We also support the following generic linked service types + - Azure SQL + - MS SQL Server + - File Server + - Generic Blob + - Generic ADLS + - Generic PostgeSQL (** WIP) + - Generic Azure Synapse SQL (** WIP) + - Generic Azure Synapse Table (** WIP) + + +# The full super set of datasets is + - Azure SQL - SQL Table + - MS SQL Server - SQL Table + - File Server - Binary + - File Server - Delimited Text + - File Server - Excel Text + - File Server - JSON + - File Server - Parquet + - Generic Blob - Binary + - Generic Blob - Delimited Text + - Generic Blob - Excel Text + - Generic Blob - JSON + - Generic Blob - Parquet + - Generic ADLS - Binary + - Generic ADLS - Delimited Text + - Generic ADLS - Excel Text + - Generic ADLS - JSON + - Generic ADLS - Parquet + - Generic PostgeSQL (** WIP) - Table + - Generic Azure Synapse SQL (** WIP) - Table + - Generic Azure Synapse Data Lake (** WIP) - Binary + - Generic Azure Synapse Data Lake (** WIP) - Delimited Text + - Generic Azure Synapse Data Lake (** WIP) - Excel Text + - Generic Azure Synapse Data Lake (** WIP) - JSON + - Generic Azure Synapse 
Data Lake (** WIP) - Parquet + + +# Design notes & history on linked services + +- Because you cant parameterise the integration runtime for a a linked service you need to create a set of linked services for each integration runtime that you want to run within the environment. +- Because you need a set of separate linked services per IR, you also need a separate set of datasets per IR. +- For simplicity we are initially going to create a full set of IR --> Linked Services --> Data Sets +- We want to simplify the creation & maintenance of these as much as possible +- WARNING!!!!!!!!! + - azurerm_data_factory_custom_dataset provider doesnt support setting expressions + - with the linked_service.parameters + - An issue has been created here https://github.com/hashicorp/terraform-provider-azurerm/issues/14586 + - For now, I am pivoting to importing the data sets as parameterised arm templates + diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Binary.json new file mode 100644 index 00000000..b1b7c98a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Binary.json @@ -0,0 +1,79 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', 
parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "StorageAccountEndpoint": { + "value": "@dataset().StorageAccountEndpoint", + "type": "Expression" + } + } + }, + "parameters": { + "StorageAccountEndpoint": { + "type": "String" + }, + "Directory": { + "type": "String" + }, + "FileSystem": { + "type": "String" + }, + "File": { + "type": "String" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "Binary", + "typeProperties": { + "location": { + "type": "AzureBlobFSLocation", + "fileName": { + "value": "@dataset().File", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().Directory", + "type": "Expression" + }, + "fileSystem": { + "value": "@dataset().FileSystem", + "type": "Expression" + } + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..5977b4cc --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_DelimitedText.json @@ -0,0 +1,91 @@ + +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of 
the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "StorageAccountEndpoint": { + "value": "@dataset().StorageAccountEndpoint", + "type": "Expression" + } + } + }, + "parameters": { + "RelativePath": { + "type": "String" + }, + "FileName": { + "type": "String" + }, + "StorageAccountEndpoint": { + "type": "String" + }, + "StorageAccountContainerName": { + "type": "String" + }, + "FirstRowAsHeader": { + "type": "bool" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "DelimitedText", + "typeProperties": { + "location": { + "type": "AzureBlobFSLocation", + "fileName": { + "value": "@dataset().FileName", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().RelativePath", + "type": "Expression" + }, + "fileSystem": { + "value": "@dataset().StorageAccountContainerName", + "type": "Expression" + } + }, + "columnDelimiter": ",", + "escapeChar": "\\", + "firstRowAsHeader": { + "value": "@dataset().FirstRowAsHeader", + "type": "Expression" + }, + "quoteChar": "\"" + }, + "schema": [] + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Excel.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Excel.json new file mode 100644 index 00000000..b2056853 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Excel.json @@ -0,0 +1,96 @@ + +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": 
"1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + + "type": "LinkedServiceReference", + "parameters": { + "StorageAccountEndpoint": { + "value": "@dataset().StorageAccountEndpoint", + "type": "Expression" + } + } + }, + "parameters": { + "RelativePath": { + "type": "String" + }, + "FileName": { + "type": "String" + }, + "StorageAccountEndpoint": { + "type": "String" + }, + "StorageAccountContainerName": { + "type": "String" + }, + "SheetName": { + "type": "String" + }, + "FirstRowAsHeader": { + "type": "bool" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "Excel", + "typeProperties": { + "sheetName": { + "value": "@dataset().SheetName", + "type": "Expression" + }, + "location": { + "type": "AzureBlobFSLocation", + "fileName": { + "value": "@dataset().FileName", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().RelativePath", + "type": "Expression" + }, + "fileSystem": { + "value": "@dataset().StorageAccountContainerName", + "type": "Expression" + } + }, + "firstRowAsHeader": { + "value": "@dataset().FirstRowAsHeader", + "type": "Expression" + } + }, + "schema": [] + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Json.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Json.json new file mode 100644 index 00000000..86c42cc8 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Json.json @@ -0,0 +1,81 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + + "type": "LinkedServiceReference", + "parameters": { + "StorageAccountEndpoint": { + "value": "@dataset().StorageAccountEndpoint", + "type": "Expression" + } + } + }, + "parameters": { + "RelativePath": { + "type": "String" + }, + "FileName": { + "type": "String" + }, + "StorageAccountEndpoint": { + "type": "String" + }, + "StorageAccountContainerName": { + "type": "String" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "Json", + "typeProperties": { + "location": { + "type": "AzureBlobFSLocation", + "fileName": { + "value": "@dataset().FileName", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().RelativePath", + "type": "Expression" + }, + "fileSystem": { + "value": 
"@dataset().StorageAccountContainerName", + "type": "Expression" + } + } + }, + "schema": {} + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..91309979 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobFS_Parquet.json @@ -0,0 +1,81 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "StorageAccountEndpoint": { + "value": "@dataset().StorageAccountEndpoint", + "type": "Expression" + } + } + }, + "parameters": { + "RelativePath": { + "type": "String" + }, + "FileName": { + "type": "String" + }, + "StorageAccountEndpoint": { + "type": "String" + }, + "StorageAccountContainerName": { + "type": "String" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "Parquet", + "typeProperties": { + "location": { + "type": 
"AzureBlobFSLocation", + "fileName": { + "value": "@dataset().FileName", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().RelativePath", + "type": "Expression" + }, + "fileSystem": { + "value": "@dataset().StorageAccountContainerName", + "type": "Expression" + } + }, + "compressionCodec": "gzip" + }, + "schema": [] + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Binary.json new file mode 100644 index 00000000..8ce1c00f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Binary.json @@ -0,0 +1,80 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "StorageAccountEndpoint": { + "value": "@dataset().StorageAccountEndpoint", + "type": "Expression" + } + } + }, + "parameters": { + "StorageAccountEndpoint": { + "type": "String" + }, + "FileSystem": { + "type": "String" + }, + "Directory": { + "type": "String" + }, + "File": { 
+ "type": "String" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "Binary", + "typeProperties": { + "location": { + "type": "AzureBlobStorageLocation", + "fileName": { + "value": "@dataset().File", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().Directory", + "type": "Expression" + }, + "container": { + "value": "@dataset().FileSystem", + "type": "Expression" + } + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..b761d6c2 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,90 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "StorageAccountEndpoint": { + "value": "@dataset().StorageAccountEndpoint", + "type": "Expression" + } 
+ } + }, + "parameters": { + "RelativePath": { + "type": "String" + }, + "FileName": { + "type": "String" + }, + "StorageAccountEndpoint": { + "type": "String" + }, + "StorageAccountContainerName": { + "type": "String" + }, + "FirstRowAsHeader": { + "type": "bool" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "DelimitedText", + "typeProperties": { + "location": { + "type": "AzureBlobStorageLocation", + "fileName": { + "value": "@dataset().FileName", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().RelativePath", + "type": "Expression" + }, + "container": { + "value": "@dataset().StorageAccountContainerName", + "type": "Expression" + } + }, + "columnDelimiter": ",", + "escapeChar": "\\", + "firstRowAsHeader": { + "value": "@dataset().FirstRowAsHeader", + "type": "Expression" + }, + "quoteChar": "\"" + }, + "schema": [] + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Excel.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Excel.json new file mode 100644 index 00000000..dd18e30a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Excel.json @@ -0,0 +1,94 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the 
integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "StorageAccountEndpoint": { + "value": "@dataset().StorageAccountEndpoint", + "type": "Expression" + } + } + }, + "parameters": { + "RelativePath": { + "type": "String" + }, + "FileName": { + "type": "String" + }, + "StorageAccountEndpoint": { + "type": "String" + }, + "StorageAccountContainerName": { + "type": "String" + }, + "SheetName": { + "type": "String" + }, + "FirstRowAsHeader": { + "type": "bool" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "Excel", + "typeProperties": { + "sheetName": { + "value": "@dataset().SheetName", + "type": "Expression" + }, + "location": { + "type": "AzureBlobStorageLocation", + "fileName": { + "value": "@dataset().FileName", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().RelativePath", + "type": "Expression" + }, + "container": { + "value": "@dataset().StorageAccountContainerName", + "type": "Expression" + } + }, + "firstRowAsHeader": { + "value": "@dataset().FirstRowAsHeader", + "type": "Expression" + } + }, + "schema": [] + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Json.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Json.json new file mode 100644 index 00000000..bbf9996f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Json.json @@ -0,0 +1,80 @@ +{ + "$schema": 
"http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "StorageAccountEndpoint": { + "value": "@dataset().StorageAccountEndpoint", + "type": "Expression" + } + } + }, + "parameters": { + "RelativePath": { + "type": "String" + }, + "FileName": { + "type": "String" + }, + "StorageAccountEndpoint": { + "type": "String" + }, + "StorageAccountContainerName": { + "type": "String" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "Json", + "typeProperties": { + "location": { + "type": "AzureBlobStorageLocation", + "fileName": { + "value": "@dataset().FileName", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().RelativePath", + "type": "Expression" + }, + "container": { + "value": "@dataset().StorageAccountContainerName", + "type": "Expression" + } + } + }, + "schema": {} + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Parquet.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..367812cf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureBlobStorage_Parquet.json @@ -0,0 +1,81 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "StorageAccountEndpoint": { + "value": "@dataset().StorageAccountEndpoint", + "type": "Expression" + } + } + }, + "parameters": { + "RelativePath": { + "type": "String" + }, + "FileName": { + "type": "String" + }, + "StorageAccountEndpoint": { + "type": "String" + }, + "StorageAccountContainerName": { + "type": "String" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "Parquet", + "typeProperties": { + "location": { + "type": "AzureBlobStorageLocation", + "fileName": { + "value": "@dataset().FileName", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().RelativePath", + "type": "Expression" + }, + "container": { + "value": "@dataset().StorageAccountContainerName", + "type": "Expression" + } + }, + 
"compressionCodec": "gzip" + }, + "schema": [] + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureSqlDWTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureSqlDWTable_NA.json new file mode 100644 index 00000000..9539020b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureSqlDWTable_NA.json @@ -0,0 +1,77 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "Server": { + "value": "@dataset().Server", + "type": "Expression" + }, + "Database": { + "value": "@dataset().Database", + "type": "Expression" + } + } + }, + "parameters": { + "Schema": { + "type": "String" + }, + "Table": { + "type": "String" + }, + "Server": { + "type": "String" + }, + "Database": { + "type": "String" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "AzureSqlDWTable", + "schema": [], + "typeProperties": { + "schema": { + "value": "@dataset().Schema", + "type": 
"Expression" + }, + "table": { + "value": "@dataset().Table", + "type": "Expression" + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureSqlTable_NA.json new file mode 100644 index 00000000..b6fd7c91 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_AzureSqlTable_NA.json @@ -0,0 +1,77 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "Server": { + "value": "@dataset().Server", + "type": "Expression" + }, + "Database": { + "value": "@dataset().Database", + "type": "Expression" + } + } + }, + "parameters": { + "Schema": { + "type": "String" + }, + "Table": { + "type": "String" + }, + "Server": { + "type": "String" + }, + "Database": { + "type": "String" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "AzureSqlTable", + "schema": [], + "typeProperties": { + "schema": { + 
"value": "@dataset().Schema", + "type": "Expression" + }, + "table": { + "value": "@dataset().Table", + "type": "Expression" + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_FileServer_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_FileServer_Binary.json new file mode 100644 index 00000000..16a1cfea --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_FileServer_Binary.json @@ -0,0 +1,93 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "Host": { + "value": "@dataset().Host", + "type": "Expression" + }, + "UserId": { + "value": "@dataset().UserId", + "type": "Expression" + }, + "Secret": { + "value": "@dataset().Secret", + "type": "Expression" + }, + "KeyVaultBaseUrl": { + "value": "@dataset().KeyVaultBaseUrl", + "type": "Expression" + } + } + }, + "parameters": { + "Host": { + "type": "string" + }, + "UserId": { + "type": "string" + }, + "Secret": { + "type": "string" + }, + "Directory": { + "type": "string" + }, + "File": { + 
"type": "string" + }, + "KeyVaultBaseUrl": { + "type": "string" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "Binary", + "typeProperties": { + "location": { + "type": "FileServerLocation", + "fileName": { + "value": "@dataset().File", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().Directory", + "type": "Expression" + } + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_FileServer_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_FileServer_Parquet.json new file mode 100644 index 00000000..16a1cfea --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_FileServer_Parquet.json @@ -0,0 +1,93 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "Host": { + "value": "@dataset().Host", + "type": "Expression" + }, + "UserId": { + "value": "@dataset().UserId", + "type": "Expression" + }, + "Secret": { + "value": 
"@dataset().Secret", + "type": "Expression" + }, + "KeyVaultBaseUrl": { + "value": "@dataset().KeyVaultBaseUrl", + "type": "Expression" + } + } + }, + "parameters": { + "Host": { + "type": "string" + }, + "UserId": { + "type": "string" + }, + "Secret": { + "type": "string" + }, + "Directory": { + "type": "string" + }, + "File": { + "type": "string" + }, + "KeyVaultBaseUrl": { + "type": "string" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "Binary", + "typeProperties": { + "location": { + "type": "FileServerLocation", + "fileName": { + "value": "@dataset().File", + "type": "Expression" + }, + "folderPath": { + "value": "@dataset().Directory", + "type": "Expression" + } + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_OracleServerTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_OracleServerTable_NA.json new file mode 100644 index 00000000..0bfd7960 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_OracleServerTable_NA.json @@ -0,0 +1,105 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', 
parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "Host": { + "value": "@dataset().Host", + "type": "Expression" + }, + "Port": { + "value": "@dataset().Port", + "type": "Expression" + }, + "ServiceName": { + "value": "@dataset().ServiceName", + "type": "Expression" + }, + "UserName": { + "value": "@dataset().UserName", + "type": "Expression" + }, + "KeyVaultBaseUrl": { + "value": "@dataset().KeyVaultBaseUrl", + "type": "Expression" + }, + "Secret": { + "value": "@dataset().Secret", + "type": "Expression" + } + } + }, + "parameters": { + "Host": { + "type": "string" + }, + "Port": { + "type": "string" + }, + "ServiceName": { + "type": "string" + }, + "UserName": { + "type": "string" + }, + "KeyVaultBaseUrl": { + "type": "string" + }, + "Secret": { + "type": "string" + }, + "TableSchema": { + "type": "string" + }, + "TableName": { + "type": "string" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "OracleTable", + "schema": [], + "typeProperties": { + "schema": { + "value": "@dataset().TableSchema", + "type": "Expression" + }, + "table": { + "value": "@dataset().TableName", + "type": "Expression" + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_Anonymous.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_Anonymous.json new file mode 100644 index 00000000..c7fed0e8 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_Anonymous.json @@ -0,0 +1,79 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": 
"String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "BaseUrl": { + "value": "@dataset().BaseUrl", + "type": "Expression" + } + } + }, + "parameters": { + "BaseUrl": { + "type": "string" + }, + "RelativeUrl": { + "type": "string" + }, + "RequestMethod": + { + "type": "string" + }, + "RequestBody": + { + "type": "string" + } + + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "RestResource", + "typeProperties": { + "relativeUrl": { + "value": "@dataset().RelativeUrl", + "type": "Expression" + }, + "requestMethod": { + "value": "@dataset().RequestMethod", + "type": "Expression" + }, + "requestBody": { + "value": "@dataset().RequestBody", + "type": "Expression" + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_Basic.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_Basic.json new file mode 100644 index 00000000..b95d9777 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_Basic.json @@ -0,0 +1,108 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + 
"contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "BaseUrl": + { + "value": "@dataset().BaseUrl", + "type": "Expression" + }, + "UserName": + { + "value": "@dataset().UserName", + "type": "Expression" + }, + "KeyVaultBaseUrl": + { + "value": "@dataset().KeyVaultBaseUrl", + "type": "Expression" + }, + "PasswordSecret": + { + "value": "@dataset().PasswordSecret", + "type": "Expression" + } + } + }, + "parameters": { + "BaseUrl": + { + "type": "string" + }, + "UserName": + { + "type": "string" + }, + "KeyVaultBaseUrl": + { + "type": "string" + }, + "PasswordSecret": + { + "type": "string" + }, + "RelativeUrl": + { + "type": "string" + }, + "RequestMethod": + { + "type": "string" + }, + "RequestBody": + { + "type": "string" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "RestResource", + "typeProperties": { + "relativeUrl": { + "value": "@dataset().RelativeUrl", + "type": "Expression" + }, + "requestMethod": { + "value": "@dataset().RequestMethod", + "type": "Expression" + }, + "requestBody": { + "value": "@dataset().RequestBody", + "type": "Expression" + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_OAuth2.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_OAuth2.json new file mode 100644 index 00000000..f184bfe8 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_OAuth2.json @@ -0,0 +1,135 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "BaseUrl": + { + "value": "@dataset().BaseUrl", + "type": "Expression" + }, + "ClientId": + { + "value": "@dataset().ClientId", + "type": "Expression" + }, + "KeyVaultBaseUrl": + { + "value": "@dataset().KeyVaultBaseUrl", + "type": "Expression" + }, + "PasswordSecret": + { + "value": "@dataset().PasswordSecret", + "type": "Expression" + }, + "TokenEndpoint": + { + "value": "@dataset().TokenEndpoint", + "type": "Expression" + }, + "Scope": + { + "value": "@dataset().Scope", + "type": "Expression" + }, + "Resource": + { + "value": "@dataset().Resource", + "type": "Expression" + } + } + }, + "parameters": { + "BaseUrl": + { + "type": "string" + }, + "ClientId": + { + "type": "string" + }, + "KeyVaultBaseUrl": + { + "type": "string" + }, + 
"PasswordSecret": + { + "type": "string" + }, + "TokenEndpoint": + { + "type": "string" + }, + "Scope": + { + "type": "string" + }, + "Resource": + { + "type": "string" + }, + "RelativeUrl": + { + "type": "string" + }, + "RequestMethod": + { + "type": "string" + }, + "RequestBody": + { + "type": "string" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "RestResource", + "typeProperties": { + "relativeUrl": { + "value": "@dataset().RelativeUrl", + "type": "Expression" + }, + "requestMethod": { + "value": "@dataset().RequestMethod", + "type": "Expression" + }, + "requestBody": { + "value": "@dataset().RequestBody", + "type": "Expression" + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_ServicePrincipal.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_ServicePrincipal.json new file mode 100644 index 00000000..c39ca1ee --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_Rest_ServicePrincipal.json @@ -0,0 +1,126 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + 
"linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "BaseUrl": + { + "value": "@dataset().BaseUrl", + "type": "Expression" + }, + "ServicePrincipalId": + { + "value": "@dataset().ServicePrincipalId", + "type": "Expression" + }, + "KeyVaultBaseUrl": + { + "value": "@dataset().KeyVaultBaseUrl", + "type": "Expression" + }, + "PasswordSecret": + { + "value": "@dataset().PasswordSecret", + "type": "Expression" + }, + "TenantId": + { + "value": "@dataset().TenantId", + "type": "Expression" + }, + "AadResourceId": + { + "value": "@dataset().AadResourceId", + "type": "Expression" + } + } + }, + "parameters": { + "BaseUrl": + { + "type": "string" + }, + "ServicePrincipalId": + { + "type": "string" + }, + "KeyVaultBaseUrl": + { + "type": "string" + }, + "PasswordSecret": + { + "type": "string" + }, + "TenantId": + { + "type": "string" + }, + "AadResourceId": + { + "type": "string" + }, + "RelativeUrl": + { + "type": "string" + }, + "RequestMethod": + { + "type": "string" + }, + "RequestBody": + { + "type": "string" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "RestResource", + "typeProperties": { + "relativeUrl": { + "value": "@dataset().RelativeUrl", + "type": "Expression" + }, + "requestMethod": { + "value": "@dataset().RequestMethod", + "type": "Expression" + }, + "requestBody": { + "value": "@dataset().RequestBody", + "type": "Expression" + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_SqlServerTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_SqlServerTable_NA.json new file mode 100644 index 00000000..0f4abf51 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_SqlServerTable_NA.json @@ -0,0 +1,98 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "KeyVaultBaseUrl": { + "value": "@dataset().KeyVaultBaseUrl", + "type": "Expression" + }, + "PasswordSecret": { + "value": "@dataset().PasswordSecret", + "type": "Expression" + }, + "Server": { + "value": "@dataset().Server", + "type": "Expression" + }, + "Database": { + "value": "@dataset().Database", + "type": "Expression" + }, + "UserName": { + "value": "@dataset().UserName", + "type": "Expression" + } + } + }, + "parameters": { + "TableSchema": { + "type": "String" + }, + "TableName": { + "type": "String" + }, + "KeyVaultBaseUrl": { + "type": "String" + }, + "PasswordSecret": { + "type": "String" + }, + "Server": { + "type": "String" + }, + "Database": { + "type": "String" + }, + "UserName": { + "type": "String" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "SqlServerTable", + "schema": [], + "typeProperties": { + "schema": { + "value": "@dataset().TableSchema", + "type": "Expression" + }, + 
"table": { + "value": "@dataset().TableName", + "type": "Expression" + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_SqlServerTable_NA_SqlAuth.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_SqlServerTable_NA_SqlAuth.json new file mode 100644 index 00000000..0f4abf51 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/arm/GDS_SqlServerTable_NA_SqlAuth.json @@ -0,0 +1,98 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "name": { + "type": "String", + "metadata": "name of the resource" + }, + "dataFactoryName": { + "type": "String", + "metadata": "Data Factory name", + "defaultValue": "arkstgdfads" + }, + "linkedServiceName": { + "type": "String", + "metadata": "The name of the linked service that this dataset uses" + }, + "integrationRuntimeName": { + "type": "String", + "metadata": "The name of the integration runtime this dataset uses" + } + + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/', parameters('name'))]", + "properties": { + "linkedServiceName": { + "referenceName": "[parameters('linkedServiceName')]", + "type": "LinkedServiceReference", + "parameters": { + "KeyVaultBaseUrl": { + "value": "@dataset().KeyVaultBaseUrl", + "type": "Expression" + }, + "PasswordSecret": { + "value": "@dataset().PasswordSecret", + "type": "Expression" + }, + "Server": { + "value": "@dataset().Server", + "type": "Expression" + }, + "Database": { + "value": "@dataset().Database", + "type": "Expression" + }, + "UserName": { + "value": "@dataset().UserName", + "type": "Expression" + } + } + }, + "parameters": { + "TableSchema": { + "type": "String" + }, + "TableName": { + "type": "String" + }, + 
"KeyVaultBaseUrl": { + "type": "String" + }, + "PasswordSecret": { + "type": "String" + }, + "Server": { + "type": "String" + }, + "Database": { + "type": "String" + }, + "UserName": { + "type": "String" + } + }, + "folder": { + "name": "[concat('ADS Go Fast/Generic/', parameters('integrationRuntimeName'))]" + }, + "annotations": [], + "type": "SqlServerTable", + "schema": [], + "typeProperties": { + "schema": { + "value": "@dataset().TableSchema", + "type": "Expression" + }, + "table": { + "value": "@dataset().TableName", + "type": "Expression" + } + } + }, + "type": "Microsoft.DataFactory/factories/datasets" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/main.tf b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/main.tf new file mode 100644 index 00000000..9c638341 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/main.tf @@ -0,0 +1,312 @@ +resource "azurerm_resource_group_template_deployment" "adls_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_AzureBlobFS*.json"): + ir => ir + #if var.is_azure == true + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.data_lake_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "blob_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_AzureBlobStorage*.json"): + ir => ir + # if 
var.is_azure == true + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.blob_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "azuresql_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_AzureSqlTable*.json"): + ir => ir + #if var.is_azure == true + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.azure_sql_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "azuressynapse_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_AzureSqlDWTable*.json"): + ir => ir + #if var.is_azure == true + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = 
"${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.azure_synapse_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "mssql_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_SqlServerTable_NA.json"): + ir => ir + #if var.is_azure == false + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.mssql_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "mssql_dataset_sqlauth" { + for_each = { + for ir in fileset(path.module, "arm/GDS_SqlServerTable_NA_SqlAuth.json"): + ir => ir + #if var.is_azure == false + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.mssql_sqlauth_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = 
var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "oracledb_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_OracleServerTable_NA.json"): + ir => ir + #if var.is_azure == false + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.oracledb_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "file_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_File*.json"): + ir => ir + #if var.is_azure == false + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.fileserver_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + + +resource "azurerm_resource_group_template_deployment" "rest_anonymous_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_Rest_Anonymous.json"): + ir => ir + #if var.is_azure == false + } + name = 
"${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.rest_anonymous_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "rest_basic_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_Rest_Basic.json"): + ir => ir + #if var.is_azure == false + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.rest_basic_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "rest_serviceprincipal_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_Rest_ServicePrincipal.json"): + ir => ir + #if var.is_azure == false + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, 
".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.rest_serviceprincipal_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "rest_oauth2_dataset" { + for_each = { + for ir in fileset(path.module, "arm/GDS_Rest_OAuth2.json"): + ir => ir + #if var.is_azure == false + } + name = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "name" = { + value = "${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}" + } + "linkedServiceName" = { + value = var.rest_oauth2_linkedservice_name + } + "dataFactoryName" = { + value = var.data_factory_name + } + "integrationRuntimeName" = { + value = var.integration_runtime_name + } + }) + template_content = file("${path.module}/${each.value}") +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/outputs.tf b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/outputs.tf new file mode 100644 index 00000000..e69de29b diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/vars.tf b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/vars.tf new file mode 100644 index 00000000..29e4880d --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_datasets/vars.tf @@ -0,0 +1,78 @@ +variable "resource_group_name" { + description = "The name of the resource group to deploy into" + type = string +} +variable "is_azure" { + description = "Is the integration runtime azure hosted?" 
+ type = bool +} +variable "integration_runtime_name" { + description = "The name of the integration runtime" + type = string +} +variable "integration_runtime_short_name" { + description = "The name of the integration runtime" + type = string +} +variable "data_factory_name" { + description = "The name of the data factory" + type = string +} +variable "data_lake_linkedservice_name" { + description = "The name of the linked service for data lake" + type = string +} +variable "blob_linkedservice_name" { + description = "The name of the linked service for blob" + type = string +} +variable "azure_sql_linkedservice_name" { + description = "The name of the linked service for AZURE SQL server" + type = string +} + +variable "azure_synapse_linkedservice_name" { + description = "The name of the linked service for AZURE Synapse" + type = string +} + +variable "mssql_linkedservice_name" { + description = "The name of the linked service for SQL server" + type = string +} + +variable "mssql_sqlauth_linkedservice_name" { + description = "The name of the linked service for SQL server" + type = string +} + +variable "fileserver_linkedservice_name" { + description = "The name of the linked service for File Server" + type = string +} + +variable "rest_anonymous_linkedservice_name" { + description = "The name of the linked service for Rest Anonymous" + type = string +} + +variable "rest_basic_linkedservice_name" { + description = "The name of the linked service for Rest Basic" + type = string +} +variable "rest_serviceprincipal_linkedservice_name" { + description = "The name of the linked service for Rest Service Principal" + type = string +} +variable "rest_oauth2_linkedservice_name" { + description = "The name of the linked service for Rest OAuth2" + type = string +} +variable "oracledb_linkedservice_name" { + description = "The name of the linked service for Oracle DB" + type = string +} +variable "name_suffix" { + description = "Used to give resource group deployments unique 
names for an environment" + type = string +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Binary_AzureBlobFS_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Binary_AzureBlobFS_Binary.json new file mode 100644 index 00000000..afd55f21 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Binary_AzureBlobFS_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline 
AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + 
"type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Binary_AzureBlobStorage_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Binary_AzureBlobStorage_Binary.json new file mode 100644 index 00000000..843a0ddf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Binary_AzureBlobStorage_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + 
"referenceName": "[concat('GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..fd537022 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + 
"resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log 
- Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..f36fe9dc --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", 
+ "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n 
string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json new file mode 100644 index 
00000000..33a0372a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n 
string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + 
"type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..aeed462e --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], 
+ "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": 
"SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..ccb0177c --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { 
+ "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..7c075177 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, 
+ "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": 
"Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..797e5b41 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + 
"type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n 
string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureSqlTable_NA.json new file mode 100644 index 00000000..3bdb5e4b --- 
/dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Excel_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": 
"Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Json_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Json_AzureSqlTable_NA.json new file mode 100644 index 00000000..4e2b95c7 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Json_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Json_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": 
"ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + 
}, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..2b6fcbd4 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": 
"[concat('GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n 
string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..2119ff26 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), 
'/','GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + 
"Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureSqlTable_NA.json new file mode 100644 index 00000000..a2fdce05 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobFS_Parquet_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", 
+ "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": 
"SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobFS_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobFS_Binary.json new file mode 100644 index 00000000..dff71093 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobFS_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": 
"Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json new file mode 100644 index 00000000..d320aea5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + 
] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + 
"referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..0b77f6a1 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + 
"value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..c828662b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + 
"type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + 
"Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json new file mode 100644 index 00000000..c8babcaf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + 
"metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n 
'\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..82f6a770 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n 
'{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": 
"[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..e9d89967 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": 
"PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n 
'\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..59170429 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_', 
parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n 
'{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..50677e09 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + 
"integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", 
+ "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureSqlTable_NA.json new file mode 100644 index 00000000..1a439be0 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Excel_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + 
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n 
'\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Json_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Json_AzureSqlTable_NA.json new file mode 100644 index 00000000..e8ee0d9f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Json_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Json_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + 
"parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + 
"userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..154c39d5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": 
"[concat('GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', 
\n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..a3c684f0 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": 
"[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": 
"ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureSqlTable_NA.json new file mode 100644 index 00000000..91f65897 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureBlobStorage_Parquet_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + 
"integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n 
)\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureSqlTable_NA_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureSqlTable_NA_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..8760c73b --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureSqlTable_NA_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": 
"Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureSqlTable_NA_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureSqlTable_NA_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..be74a9e4 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_AzureSqlTable_NA_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": 
"Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": 
"SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_SqlServerTable_NA_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_SqlServerTable_NA_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..d0a6d554 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_SqlServerTable_NA_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { 
+ "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_SqlServerTable_NA_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_SqlServerTable_NA_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..a6daa874 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL-1_SqlServerTable_NA_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ 
+ { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + 
"type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load.json new file mode 100644 index 00000000..f6953cc1 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load.json @@ -0,0 +1,378 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + 
"integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(replace(pipeline().parameters.TaskObject.Source.SQLStatement,'',string(pipeline().parameters.BatchCount)),'',string(pipeline().parameters.Item))" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": 
"@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "AzureSqlSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), 
'\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n 
'\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', 
string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + 
"DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark.json new file mode 100644 index 00000000..b4b84ca9 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark.json @@ -0,0 +1,381 @@ +{ + "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(\n replace(\n replace(\n pipeline().parameters.TaskObject.Source.SQLStatement,\n '<batchcount>',\n string(pipeline().parameters.BatchCount)\n ),\n '<item>',string(pipeline().parameters.Item)),\n '<newwatermark>',string(pipeline().parameters.NewWaterMark)\n)" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + 
"referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "AzureSqlSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + 
"typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n 
'\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + 
"parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + 
"NewWaterMark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load.json new file mode 100644 index 00000000..6737eecd --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load.json @@ -0,0 +1,378 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(replace(pipeline().parameters.TaskObject.Source.SQLStatement,'<batchcount>',string(pipeline().parameters.BatchCount)),'<item>',string(pipeline().parameters.Item))" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": 
"Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "AzureSqlSource" 
+ }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n 
'\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', 
parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + 
"waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": 
"Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark.json new file mode 100644 index 00000000..778457dd --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark.json @@ -0,0 +1,381 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(\n replace(\n replace(\n pipeline().parameters.TaskObject.Source.SQLStatement,\n '<batchcount>',\n string(pipeline().parameters.BatchCount)\n ),\n '<item>',string(pipeline().parameters.Item)),\n '<newwatermark>',string(pipeline().parameters.NewWaterMark)\n)" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + 
"dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "AzureSqlSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat(\n 
'{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + 
"type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', 
string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWaterMark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": 
"Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_Create_Table.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_Create_Table.json new file mode 100644 index 00000000..1038fcd7 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_Create_Table.json @@ -0,0 +1,285 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "If exist Staging TableName", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@not(empty(pipeline().parameters.TaskObject.Target.TableName))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", 
+ "type": "LinkedServiceReference" + }, + "name": "AF Get SQL Create Statement Staging", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"',string(pipeline().parameters.TaskObject.Target.TableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Target.TableName),'\",\"StorageAccountName\":\"', string(pipeline().parameters.TaskObject.Source.System.SystemServer), '\",\"StorageAccountContainer\":\"', string(pipeline().parameters.TaskObject.Source.System.Container), '\",\"RelativePath\":\"', string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), '\",\"SchemaFileName\":\"', string(pipeline().parameters.TaskObject.Source.SchemaFileName), '\"}'))" + }, + "functionName": "GetSQLCreateStatementFromSchema", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get SQL Create Statement Staging", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Create Staging Table", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + 
"type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get SQL Create Statement Staging').output.CreateStatement" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Create Staging Table", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Create Staging Table Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Create Staging Table\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Create Staging Table').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "If exist Target TableName", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@not(empty(pipeline().parameters.TaskObject.Target.StagingTableName))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + 
"name": "AF Get SQL Create Statement Target", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"',string(pipeline().parameters.TaskObject.Target.StagingTableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Target.StagingTableName),'\",\"StorageAccountName\":\"', string(pipeline().parameters.TaskObject.Source.System.SystemServer), '\",\"StorageAccountContainer\":\"', string(pipeline().parameters.TaskObject.Source.System.Container), '\",\"RelativePath\":\"', string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), '\",\"SchemaFileName\":\"', string(pipeline().parameters.TaskObject.Source.SchemaFileName), '\",\"DropIfExist\":\"True\"}'))" + }, + "functionName": "GetSQLCreateStatementFromSchema", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get SQL Create Statement Target", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Create Target Table", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": 
"Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get SQL Create Statement Target').output.CreateStatement" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Create Target Table", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Create Target Table Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Create Target Table\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Create Target Table').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-04T13:09:30Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_Post_Copy.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_Post_Copy.json new file mode 100644 index 00000000..8753f739 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_AzureSqlTable_NA_Post_Copy.json @@ -0,0 +1,582 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "If Exist PostCopySQL", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@not(empty(pipeline().parameters.TaskObject.Target.PostCopySQL))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Run PostCopySQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.PostCopySQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Run PostCopySQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Run PostCopySQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Run PostCopySQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Run PostCopySQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Exist PostCopySQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "If Exist MergeSQL", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": 
"@not(empty(pipeline().parameters.TaskObject.Target.MergeSQL))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Run MergeSQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.MergeSQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Run MergeSQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Run MergeSQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Run MergeSQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Run MergeSQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + 
"FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Exist PostCopySQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "If AutoGenerateMerge", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@bool(pipeline().parameters.TaskObject.Target.AutoGenerateMerge)" + }, + "ifTrueActivities": [ + { + "dependsOn": [ + { + "activity": "AF Get Merge Statement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Run MergeStatement", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Merge Statement').output.MergeStatement" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL 
Stage", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Target.StagingTableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Target.StagingTableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get Metadata Stage", + "dependencyConditions": [ + "Succeeded" + ] + }, + { + "activity": "Lookup Get Metadata Target", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Merge Statement", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId),'\",\"TargetTableSchema\":\"',string(pipeline().parameters.TaskObject.Target.TableSchema),'\",\"TargetTableName\":\"',string(pipeline().parameters.TaskObject.Target.TableName),'\",\"StagingTableSchema\":\"',string(pipeline().parameters.TaskObject.Target.StagingTableSchema),'\",\"StagingTableName\":\"',string(pipeline().parameters.TaskObject.Target.StagingTableName),'\",\"Stage\":', 
string(activity('Lookup Get Metadata Stage').output.value), ',\"Target\":', string(activity('Lookup Get Metadata Target').output.value),'}'))" + }, + "functionName": "GetSQLMergeStatement", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL Stage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get Metadata Stage", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL Stage').output.InformationSchemaSQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL Target", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get Metadata Target", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL Target').output.InformationSchemaSQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL Target", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Target.TableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Target.TableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Run MergeStatement", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Run AutoMerge Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + 
"Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Run AutoMerge\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Run MergeStatement').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get Metadata Target", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Run Lookup Get Metadata Target Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Lookup Get Metadata Target\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get Metadata Target').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get Metadata Stage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Lookup Get Metadata Stage Failed", + "type": 
"ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Lookup Get Metadata Stage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get Metadata Stage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Common')]" + }, + "lastPublishTime": "2020-08-04T13:09:30Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load.json new file mode 100644 index 00000000..ee57a52a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load.json @@ -0,0 +1,390 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + 
"metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(replace(pipeline().parameters.TaskObject.Source.SQLStatement,'',string(pipeline().parameters.BatchCount)),'',string(pipeline().parameters.Item))" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "SqlServerSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + 
"name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n 
string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy 
Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": 
"int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark.json new file mode 100644 index 00000000..fe879d00 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark.json @@ -0,0 +1,393 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(\n replace(\n replace(\n pipeline().parameters.TaskObject.Source.SQLStatement,\n '',\n string(pipeline().parameters.BatchCount)\n ),\n '',string(pipeline().parameters.Item)),\n '',string(pipeline().parameters.NewWaterMark)\n)" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + 
}, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "SqlServerSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + 
"type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + 
"value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy 
SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWaterMark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": 
"datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load.json new file mode 100644 index 00000000..90dc9bde --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load.json @@ -0,0 +1,390 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set 
SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(replace(pipeline().parameters.TaskObject.Source.SQLStatement,'',string(pipeline().parameters.BatchCount)),'',string(pipeline().parameters.Item))" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + 
"StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "SqlServerSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + 
"pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + 
"retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": 
[ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + 
"FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark.json new file mode 100644 index 00000000..bb836c1d --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark.json @@ -0,0 +1,393 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this 
pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(\n replace(\n replace(\n pipeline().parameters.TaskObject.Source.SQLStatement,\n '',\n string(pipeline().parameters.BatchCount)\n ),\n '',string(pipeline().parameters.Item)),\n '',string(pipeline().parameters.NewWaterMark)\n)" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', 
parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "SqlServerSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + 
"Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n 
string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + 
"type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWaterMark": { + "type": "string" + 
}, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json new file mode 100644 
index 00000000..e889fe54 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json @@ -0,0 +1,143 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full Load", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" 
+ } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "SH-AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json new file mode 100644 index 00000000..e941c8ce --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json @@ -0,0 +1,178 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "ForEach Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "AF Set New Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TaskMasterId\":\"', string(pipeline().parameters.TaskObject.TaskMasterId),'\",\"TaskMasterWaterMarkColumnType\":\"', 
string(pipeline().parameters.TaskObject.Source.IncrementalColumnType),'\",\"WaterMarkValue\":\"', string(pipeline().parameters.NewWatermark), '\"}'))" + }, + "FunctionName": "WaterMark", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@pipeline().parameters.NewWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWatermark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + 
}, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json new file mode 100644 index 00000000..8f8e34fc --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json @@ -0,0 +1,143 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + 
"contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full Load", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + 
"TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "SH-AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json new file mode 100644 index 00000000..1d1c66d5 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json @@ -0,0 +1,178 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "ForEach Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "AF Set New Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TaskMasterId\":\"', string(pipeline().parameters.TaskObject.TaskMasterId),'\",\"TaskMasterWaterMarkColumnType\":\"', string(pipeline().parameters.TaskObject.Source.IncrementalColumnType),'\",\"WaterMarkValue\":\"', string(pipeline().parameters.NewWatermark), '\"}'))" + }, + "FunctionName": "WaterMark", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + 
{ + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@pipeline().parameters.NewWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWatermark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": 
"adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json new file mode 100644 index 00000000..76b59c77 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json @@ -0,0 +1,143 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration 
runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full Load", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "SH-AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": 
"2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json new file mode 100644 index 00000000..7f9408ca --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json @@ -0,0 +1,178 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of 
the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "ForEach Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "AF Set New Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TaskMasterId\":\"', string(pipeline().parameters.TaskObject.TaskMasterId),'\",\"TaskMasterWaterMarkColumnType\":\"', string(pipeline().parameters.TaskObject.Source.IncrementalColumnType),'\",\"WaterMarkValue\":\"', string(pipeline().parameters.NewWatermark), '\"}'))" + }, + "FunctionName": "WaterMark", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + 
"value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@pipeline().parameters.NewWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWatermark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": 
"/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json new file mode 100644 index 00000000..36dcb7a4 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json @@ -0,0 +1,143 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + 
"properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full Load", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "SH-AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + 
"TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json new file mode 100644 index 00000000..f44ec921 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json @@ -0,0 +1,178 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared 
KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "ForEach Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "AF Set New Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TaskMasterId\":\"', string(pipeline().parameters.TaskObject.TaskMasterId),'\",\"TaskMasterWaterMarkColumnType\":\"', string(pipeline().parameters.TaskObject.Source.IncrementalColumnType),'\",\"WaterMarkValue\":\"', string(pipeline().parameters.NewWatermark), '\"}'))" + }, + "FunctionName": "WaterMark", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@pipeline().parameters.NewWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": 
"[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWatermark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + 
"TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary.json new file mode 100644 index 00000000..afd55f21 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + 
"waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n 
'\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary.json new file mode 100644 index 00000000..df9fddfd --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary.json @@ -0,0 +1,223 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": 
{ + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS", + "outputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "BinarySink" + }, + "source": { + 
"storeSettings": { + "deleteFilesAfterCompletion": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DeleteAfterCompletion" + }, + "formatSettings": { + "type": "BinaryReadSettings" + }, + "recursive": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Recursively" + }, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + } + }, + "type": "BinarySource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', 
string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary.json new file mode 100644 index 00000000..843a0ddf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n 
string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', 
parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary.json new file mode 100644 index 00000000..69a62d66 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary.json @@ -0,0 +1,223 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + 
"FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "BinarySink" + }, + "source": { + "storeSettings": { + "deleteFilesAfterCompletion": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DeleteAfterCompletion" + }, + "formatSettings": { + "type": "BinaryReadSettings" + }, + "recursive": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Recursively" + }, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": 
"Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + } + }, + "type": "BinarySource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] 
+ }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..fd537022 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n 
'\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..8ad2eb7f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,233 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "enablePartitionDiscovery": false, + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "maxConcurrentConnections": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobFSReadSettings", + 
"wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": 
"Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..f36fe9dc --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n 
string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + 
"type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..46b2a1a8 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,233 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + 
"StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "enablePartitionDiscovery": false, + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "maxConcurrentConnections": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy 
Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json new 
file mode 100644 index 00000000..33a0372a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n 
'\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" 
+ } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..37fb3f3b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary.json @@ -0,0 +1,338 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', 
string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + 
"type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "storeSettings": { + "enablePartitionDiscovery": false, + "maxConcurrentConnections": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DynamicMapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + 
"dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": 
"Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..aeed462e --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", 
+ "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + 
"typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary.json new file mode 100644 index 00000000..e1d5851d --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary.json @@ -0,0 +1,226 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + 
"integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + 
"type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobFSWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', 
string(activity('Copy AzureBlobFS to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + 
"Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..ccb0177c --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..d1a384a0 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,217 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": 
[ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + 
"policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', 
string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..7c075177 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": 
"ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + 
}, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary.json new file mode 100644 index 00000000..f0e03d9a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary.json @@ -0,0 +1,226 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + 
"value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobStorageWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), 
'\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..797e5b41 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n 
string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', 
parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..7f5f6fe6 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,217 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": 
"AzureBlobFSReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy 
AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA.json new file mode 100644 index 00000000..3bdb5e4b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + 
"metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n 
string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary.json new file mode 100644 index 
00000000..a8db4019 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary.json @@ -0,0 +1,322 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates 
Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n 
contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": 
"[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ExcelSource" + }, + "translator": { + "type": "Expression", + "value": "@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": 
"PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff 
--git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA.json new file mode 100644 index 00000000..4e2b95c7 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Json_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + 
"type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + 
"annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..bdf06492 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary.json @@ -0,0 +1,316 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', 
string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n 
string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Json_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "type": "JsonReadSettings" + }, + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "JsonSource" + }, + "translator": { + "type": "Expression", + "value": "@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', 
string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": 
"[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..2b6fcbd4 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { 
+ "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary.json new file mode 100644 index 00000000..22fdbf40 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary.json @@ -0,0 +1,218 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" 
+ } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": 
false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobFSWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ParquetSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', 
string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..2119ff26 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + 
"type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": 
"PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary.json new file mode 100644 index 00000000..c8eb6d88 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary.json @@ -0,0 +1,218 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { 
+ "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + 
}, + "type": "AzureBlobStorageWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ParquetSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", 
+ "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA.json new file mode 100644 index 00000000..a2fdce05 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n 
string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..ce1fddad --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary.json @@ -0,0 +1,326 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + 
"dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n 
string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": 
"[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "type": "AzureBlobFSReadSettings" + }, + "storeSettings": { + "enablePartitionDiscovery": false, + "recursive": false, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@concat(\n replace(\n pipeline().parameters.TaskObject.Source.DataFileName,\n '.parquet',\n ''\n ),\n '*.parquet'\n)" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "ParquetSource" + }, + "translator": { + "type": "Expression", + "value": "@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', 
string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', 
parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary.json new file mode 100644 index 00000000..dff71093 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary.json new file mode 100644 index 00000000..b2f6cd33 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary.json @@ -0,0 +1,223 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, 
+ "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS", + "outputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + 
"value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "BinarySink" + }, + "source": { + "storeSettings": { + "deleteFilesAfterCompletion": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DeleteAfterCompletion" + }, + "formatSettings": { + "type": "BinaryReadSettings" + }, + "recursive": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Recursively" + }, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + } + }, + "type": "BinarySource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": 
{ + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": 
"Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json new file mode 100644 index 00000000..d320aea5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } 
+ ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": 
"SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary.json new file mode 100644 index 00000000..196abb3e --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary.json @@ -0,0 +1,223 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + 
"parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "BinarySink" + }, + "source": { + "storeSettings": { + "deleteFilesAfterCompletion": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DeleteAfterCompletion" + }, + 
"formatSettings": { + "type": "BinaryReadSettings" + }, + "recursive": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Recursively" + }, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + } + }, + "type": "BinarySource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', 
string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json new file mode 
100644 index 00000000..0b77f6a1 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n 
string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": 
"2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..f7716074 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,233 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + 
"type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "enablePartitionDiscovery": false, + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "maxConcurrentConnections": 
{ + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), 
'\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..c828662b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n 
concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": 
"[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..b4ee8501 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,233 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + 
"FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "enablePartitionDiscovery": false, + "formatSettings": { + "skipLineCount": { + "type": 
"Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "maxConcurrentConnections": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', 
string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json new file mode 100644 index 00000000..c8babcaf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": 
"ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + 
}, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..339bcefc --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary.json @@ -0,0 +1,338 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": 
"@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + 
"typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": 
"[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "storeSettings": { + "enablePartitionDiscovery": false, + "maxConcurrentConnections": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DynamicMapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to 
SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..82f6a770 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration 
runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + 
}, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary.json new file mode 100644 index 00000000..9dd18d2f --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary.json @@ -0,0 +1,226 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Excel_', 
parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobFSWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + 
"value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', 
string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..e9d89967 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + 
"metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + 
"dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..92456950 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,217 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + 
"parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + 
"type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + 
"FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": 
"PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..59170429 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary.json new file mode 100644 index 00000000..1cb0e972 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary.json @@ -0,0 +1,226 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared 
KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobStorageWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + 
"waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + 
"folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..50677e09 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary_', 
parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n 
string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..5290a3ea --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,217 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary_', 
parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + 
"timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', 
string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA.json new file mode 100644 index 00000000..1a439be0 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n 
string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', 
parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..a860edb5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary.json @@ -0,0 +1,322 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', 
string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n 
string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Excel_', parameters('integrationRuntimeShortName'))]", + "type": 
"DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ExcelSource" + }, + "translator": { + "type": "Expression", + "value": "@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', 
string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA.json new file mode 100644 index 00000000..e8ee0d9f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Json_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + 
"typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n 
'\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..c5745a8a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary.json @@ -0,0 +1,317 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared 
KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": 
true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Json_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "type": "JsonReadSettings" + }, + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "JsonSource" + }, + "translator": { + "type": "Expression", + "value": 
"@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to 
SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..154c39d5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration 
runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" 
+ }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary.json new file mode 100644 index 00000000..2c9e25c2 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary.json @@ -0,0 +1,218 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobFSWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ParquetSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', 
string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', 
string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..a3c684f0 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), 
'/','GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": 
{ + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary.json new file mode 100644 index 00000000..ec908ca5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary.json @@ -0,0 +1,218 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + 
"integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobStorageWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ParquetSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": 
"Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + 
"type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA.json new file mode 100644 index 00000000..91f65897 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + 
"referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..3a19cd41 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary.json @@ -0,0 +1,326 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + 
"resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, 
+ { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + 
}, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "type": "AzureBlobStorageReadSettings" + }, + "storeSettings": { + "enablePartitionDiscovery": false, + "recursive": true, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@concat(\n replace(\n pipeline().parameters.TaskObject.Source.DataFileName,\n '.parquet',\n ''\n 
),\n '*.parquet'\n)" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "ParquetSource" + }, + "translator": { + "type": "Expression", + "value": "@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..8760c73b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": 
"1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n 
'\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..dff95779 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,551 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Source.TableSchema), '\",\"TableName\":\"', 
string(pipeline().parameters.TaskObject.Source.TableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get SQL Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL').output.InformationSchemaSQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Get Metadata Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Get 
Metadata\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get SQL Metadata').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Metadata and Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"',\n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Lookup Get SQL Metadata').output),\n ',\"MetadataType\":\"SQL\"}')\n)" + }, 
+ "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Persist Metadata and Get Mapping", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Switch Load Type", + "type": "Switch", + "typeProperties": { + "cases": [ + { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full_Load Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "value": "Full" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": 
false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Full Load Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": 
false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Full_Chunk" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark and Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWatermark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + 
"waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark and Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark_Chunk" + } + ], + "on": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.IncrementalType" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-04T12:40:45Z", + "parameters": { + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": 
"https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..be74a9e4 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": 
"String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n 
string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary.json new file mode 100644 
index 00000000..a1698575 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,551 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Source.TableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Source.TableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + 
"activity": "AF Get Information Schema SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get SQL Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL').output.InformationSchemaSQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Get Metadata Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Get Metadata\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get SQL Metadata').error.message), 
'\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Metadata and Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"',\n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Lookup Get SQL Metadata').output),\n ',\"MetadataType\":\"SQL\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Persist Metadata and Get Mapping", + 
"dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Switch Load Type", + "type": "Switch", + "typeProperties": { + "cases": [ + { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full_Load Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "value": "Full" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": 
"Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Full Load Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + 
"type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Full_Chunk" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark and Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWatermark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark and Chunk", + "policy": { + "retry": 0, + 
"retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark_Chunk" + } + ], + "on": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.IncrementalType" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-04T12:40:45Z", + "parameters": { + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": 
"MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..d0a6d554 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + 
"integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": 
"PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..8972156a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,599 @@ +{ + 
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Source.TableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Source.TableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get SQL Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + 
"secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL').output.InformationSchemaSQL" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Get Metadata Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Get 
Metadata\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get SQL Metadata').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Metadata and Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"',\n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Lookup Get SQL Metadata').output),\n ',\"MetadataType\":\"SQL\"}')\n)" + }, 
+ "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Persist Metadata and Get Mapping", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Switch Load Type", + "type": "Switch", + "typeProperties": { + "cases": [ + { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full_Load Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "value": "Full" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": 
false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Full Load Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } 
+ }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Full_Chunk" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark and Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Chunk 
Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWatermark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark and Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', 
parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark_Chunk" + } + ], + "on": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.IncrementalType" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-04T12:40:45Z", + "parameters": { + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + 
"StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..a6daa874 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..14b31d48 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,599 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, 
+ "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Source.TableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Source.TableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get SQL Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL').output.InformationSchemaSQL" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Get Metadata Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Get Metadata\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get SQL Metadata').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + 
"referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Metadata and Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"',\n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Lookup Get SQL Metadata').output),\n ',\"MetadataType\":\"SQL\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Persist Metadata and Get Mapping", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Switch Load Type", + "type": "Switch", + "typeProperties": { + "cases": [ + { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full_Load Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist 
Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "value": "Full" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Full Load Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": 
"Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Full_Chunk" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark and Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWatermark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark and Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark_Chunk" + } + ], + "on": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.IncrementalType" + } + }, + 
"userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-04T12:40:45Z", + "parameters": { + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/main.tf 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/main.tf new file mode 100644 index 00000000..69735f30 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/main.tf @@ -0,0 +1,109 @@ +resource "azurerm_resource_group_template_deployment" "azure_pipelines_level_0" { + for_each = { + for pipeline in fileset(path.module, "arm/GPL0_Azure*.json"): + pipeline => pipeline + } + name = substr(sha256("${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}"), 0,30) + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "dataFactoryName" = { + value = var.data_factory_name + }, + "integrationRuntimeName" = { + value = var.integration_runtime_name + }, + "integrationRuntimeShortName" = { + value = var.integration_runtime_short_name + }, + "sharedKeyVaultUri" = { + value = var.shared_keyvault_uri + }, + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "azure_pipelines_level_1" { + for_each = { + for pipeline in fileset(path.module, "arm/GPL1_Azure*.json"): + pipeline => pipeline + } + name = substr(sha256("${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}"), 0,30) + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "dataFactoryName" = { + value = var.data_factory_name + }, + "integrationRuntimeName" = { + value = var.integration_runtime_name + }, + "integrationRuntimeShortName" = { + value = var.integration_runtime_short_name + }, + "sharedKeyVaultUri" = { + value = var.shared_keyvault_uri + }, + }) + template_content = file("${path.module}/${each.value}") + depends_on = [ + azurerm_resource_group_template_deployment.azure_pipelines_level_0 + ] +} + +resource 
"azurerm_resource_group_template_deployment" "azure_pipelines" { + for_each = { + for pipeline in fileset(path.module, "arm/GPL_Azure*.json"): + pipeline => pipeline + } + name = substr(sha256("${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}"), 0,30) + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "dataFactoryName" = { + value = var.data_factory_name + }, + "integrationRuntimeName" = { + value = var.integration_runtime_name + }, + "integrationRuntimeShortName" = { + value = var.integration_runtime_short_name + }, + "sharedKeyVaultUri" = { + value = var.shared_keyvault_uri + }, + }) + template_content = file("${path.module}/${each.value}") + depends_on = [ + azurerm_resource_group_template_deployment.azure_pipelines_level_0, + azurerm_resource_group_template_deployment.azure_pipelines_level_1 + ] +} + +resource "azurerm_resource_group_template_deployment" "azure_pipelines_wrapper" { + for_each = { + for pipeline in fileset(path.module, "arm/GPL-1_Azure*.json"): + pipeline => pipeline + } + name = substr(sha256("${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}"), 0,30) + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "dataFactoryName" = { + value = var.data_factory_name + }, + "integrationRuntimeName" = { + value = var.integration_runtime_name + }, + "integrationRuntimeShortName" = { + value = var.integration_runtime_short_name + }, + "sharedKeyVaultUri" = { + value = var.shared_keyvault_uri + }, + }) + template_content = file("${path.module}/${each.value}") + depends_on = [ + azurerm_resource_group_template_deployment.azure_pipelines + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/outputs.tf 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/outputs.tf new file mode 100644 index 00000000..e69de29b diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/vars.tf b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/vars.tf new file mode 100644 index 00000000..548a6dc9 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_azure/vars.tf @@ -0,0 +1,28 @@ +variable "resource_group_name" { + description = "The name of the resource group to deploy into" + type = string +} + +variable "data_factory_name" { + description = "The name of the data factory" + type = string +} +variable "shared_keyvault_uri" { + description = "The uri of the shared keyvault" + type = string +} + + +variable "integration_runtime_name" { + description = "The name of the integration runtime" + type = string +} + +variable "integration_runtime_short_name" { + description = "The short name of the integration runtime" + type = string +} +variable "name_suffix" { + description = "Used to give resource group deployments unique names for an environment" + type = string +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/arm/GPL_AzureFunction_Common.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/arm/GPL_AzureFunction_Common.json new file mode 100644 index 00000000..d2098ea1 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/arm/GPL_AzureFunction_Common.json @@ -0,0 +1,121 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "type": "String", + "metadata": "The name of the data factory" + }, + "functionLinkedServiceName": { + "type": "String", + "metadata": "The name of the azure function linked service 
that this dataset uses" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/GPL_AzureFunction_Common')]", + "type": "Microsoft.DataFactory/factories/pipelines", + "properties": { + "activities": [ + { + "name": "Switch Method", + "type": "Switch", + "dependsOn": [], + "userProperties": [], + "typeProperties": { + "on": { + "value": "@pipeline().parameters.Method", + "type": "Expression" + }, + "cases": [ + { + "value": "Put", + "activities": [ + { + "name": "Azure Function Generic Put", + "type": "AzureFunctionActivity", + "dependsOn": [], + "policy": { + "timeout": "7.00:00:00", + "retry": 0, + "retryIntervalInSeconds": 30, + "secureOutput": false, + "secureInput": false + }, + "userProperties": [], + "typeProperties": { + "functionName": { + "value": "@pipeline().parameters.FunctionName", + "type": "Expression" + }, + "method": "PUT", + "body": { + "value": "@pipeline().parameters.Body", + "type": "Expression" + } + }, + "linkedServiceName": { + "referenceName": "[parameters('functionLinkedServiceName')]", + "type": "LinkedServiceReference" + } + } + ] + }, + { + "value": "Post", + "activities": [ + { + "name": "Azure Function Generic Post", + "type": "AzureFunctionActivity", + "dependsOn": [], + "policy": { + "timeout": "7.00:00:00", + "retry": 0, + "retryIntervalInSeconds": 30, + "secureOutput": false, + "secureInput": false + }, + "userProperties": [], + "typeProperties": { + "functionName": { + "value": "@pipeline().parameters.FunctionName", + "type": "Expression" + }, + "method": "POST", + "body": { + "value": "@pipeline().parameters.Body", + "type": "Expression" + } + }, + "linkedServiceName": { + "referenceName": "[parameters('functionLinkedServiceName')]", + "type": "LinkedServiceReference" + } + } + ] + } + ] + } + } + ], + "parameters": { + "Body": { + "type": "String" + }, + "FunctionName": { + "type": "String" + }, + "Method": { + "type": "String" + } + }, + "folder": { + "name": "ADS Go 
Fast/Common" + }, + "annotations": [], + "lastPublishTime": "2020-07-14T10:58:30Z" + } + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/arm/SPL_AzureFunction.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/arm/SPL_AzureFunction.json new file mode 100644 index 00000000..870208b7 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/arm/SPL_AzureFunction.json @@ -0,0 +1,121 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "type": "String", + "metadata": "The name of the data factory" + }, + "functionLinkedServiceName": { + "type": "String", + "metadata": "The name of the azure function linked service that this dataset uses" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/SPL_AzureFunction')]", + "type": "Microsoft.DataFactory/factories/pipelines", + "properties": { + "activities": [ + { + "name": "Switch Method", + "type": "Switch", + "dependsOn": [], + "userProperties": [], + "typeProperties": { + "on": { + "value": "@pipeline().parameters.Method", + "type": "Expression" + }, + "cases": [ + { + "value": "Put", + "activities": [ + { + "name": "Azure Function Generic Put", + "type": "AzureFunctionActivity", + "dependsOn": [], + "policy": { + "timeout": "7.00:00:00", + "retry": 0, + "retryIntervalInSeconds": 30, + "secureOutput": false, + "secureInput": false + }, + "userProperties": [], + "typeProperties": { + "functionName": { + "value": "@pipeline().parameters.FunctionName", + "type": "Expression" + }, + "method": "PUT", + "body": { + "value": "@pipeline().parameters.Body", + "type": "Expression" + } + }, + "linkedServiceName": { + "referenceName": "[parameters('functionLinkedServiceName')]", + "type": 
"LinkedServiceReference" + } + } + ] + }, + { + "value": "Post", + "activities": [ + { + "name": "Azure Function Generic Post", + "type": "AzureFunctionActivity", + "dependsOn": [], + "policy": { + "timeout": "7.00:00:00", + "retry": 0, + "retryIntervalInSeconds": 30, + "secureOutput": false, + "secureInput": false + }, + "userProperties": [], + "typeProperties": { + "functionName": { + "value": "@pipeline().parameters.FunctionName", + "type": "Expression" + }, + "method": "POST", + "body": { + "value": "@pipeline().parameters.Body", + "type": "Expression" + } + }, + "linkedServiceName": { + "referenceName": "[parameters('functionLinkedServiceName')]", + "type": "LinkedServiceReference" + } + } + ] + } + ] + } + } + ], + "parameters": { + "Body": { + "type": "String" + }, + "FunctionName": { + "type": "String" + }, + "Method": { + "type": "String" + } + }, + "folder": { + "name": "ADS Go Fast/Common" + }, + "annotations": [], + "lastPublishTime": "2020-07-14T10:58:30Z" + } + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/main.tf b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/main.tf new file mode 100644 index 00000000..b24fb8fc --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/main.tf @@ -0,0 +1,14 @@ +resource "azurerm_resource_group_template_deployment" "pipeline_generic_function" { + name = "AZ_Function_Generic_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "dataFactoryName" = { + value = var.data_factory_name + }, + "functionLinkedServiceName" = { + value = var.linkedservice_azure_function_name + } + }) + template_content = file("${path.module}/arm/SPL_AzureFunction.json") +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/outputs.tf 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/outputs.tf new file mode 100644 index 00000000..e69de29b diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/vars.tf b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/vars.tf new file mode 100644 index 00000000..e2bbb003 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_common/vars.tf @@ -0,0 +1,18 @@ +variable "resource_group_name" { + description = "The name of the resource group to deploy into" + type = string +} + +variable "data_factory_name" { + description = "The name of the data factory" + type = string +} + +variable "linkedservice_azure_function_name" { + description = "The name of the linked service for azure function" + type = string +} +variable "name_suffix" { + description = "Used to give resource group deployments unique names for an environment" + type = string +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Binary_AzureBlobFS_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Binary_AzureBlobFS_Binary.json new file mode 100644 index 00000000..afd55f21 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Binary_AzureBlobFS_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": 
"String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": 
"Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Binary_AzureBlobStorage_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Binary_AzureBlobStorage_Binary.json new file mode 100644 index 00000000..843a0ddf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Binary_AzureBlobStorage_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": 
"1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), 
\n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..fd537022 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n 
'{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": 
"[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..f36fe9dc --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary_', 
parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n 
string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json new file mode 100644 index 00000000..33a0372a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_', 
parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n 
'{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..aeed462e --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + 
"integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + 
"type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..ccb0177c --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ 
+{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n 
'\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..7c075177 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - 
Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": 
"PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..797e5b41 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + 
"pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureSqlTable_NA.json new file mode 100644 index 00000000..3bdb5e4b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Excel_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + 
"apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + 
"typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Json_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Json_AzureSqlTable_NA.json new file mode 100644 index 00000000..4e2b95c7 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Json_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name 
of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Json_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + 
"pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..2b6fcbd4 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": 
"Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..2119ff26 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + 
"dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": 
"Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureSqlTable_NA.json new file mode 100644 index 00000000..a2fdce05 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobFS_Parquet_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobFS_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobFS_Binary.json new file mode 100644 index 00000000..dff71093 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobFS_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + 
"resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - 
Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json new file mode 100644 index 00000000..d320aea5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data 
factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n 
string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json new 
file mode 100644 index 00000000..0b77f6a1 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n 
string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": 
"2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..c828662b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + 
"waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n 
'\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json new file mode 100644 index 00000000..c8babcaf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_', 
parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n 
'{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..82f6a770 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": 
"String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": 
"SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..e9d89967 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": 
"Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..59170429 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main 
Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + 
"Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..50677e09 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + 
"TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n 
string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureSqlTable_NA.json new file mode 100644 index 00000000..1a439be0 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Excel_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": 
"String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": 
"Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Json_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Json_AzureSqlTable_NA.json new file mode 100644 index 00000000..e8ee0d9f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Json_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + 
"type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Json_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n 
string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json new 
file mode 100644 index 00000000..154c39d5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n 
string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": 
"2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..a3c684f0 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + 
"waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n 
'\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureSqlTable_NA.json new file mode 100644 index 00000000..91f65897 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureBlobStorage_Parquet_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + 
"properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n 
pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureSqlTable_NA_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureSqlTable_NA_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..8760c73b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureSqlTable_NA_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name 
of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + 
"userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureSqlTable_NA_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureSqlTable_NA_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..be74a9e4 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_AzureSqlTable_NA_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n 
string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_SqlServerTable_NA_AzureBlobFS_Parquet.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_SqlServerTable_NA_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..d0a6d554 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_SqlServerTable_NA_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n 
string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', 
parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_SqlServerTable_NA_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_SqlServerTable_NA_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..a6daa874 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL-1_SqlServerTable_NA_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + 
"waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n 
'\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load.json new file mode 100644 index 00000000..f6953cc1 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load.json @@ -0,0 +1,378 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_', 
parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(replace(pipeline().parameters.TaskObject.Source.SQLStatement,'',string(pipeline().parameters.BatchCount)),'',string(pipeline().parameters.Item))" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { 
+ "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "AzureSqlSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + 
"activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + 
"FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": 
"ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + 
"DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark.json new file mode 100644 index 00000000..b4b84ca9 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark.json @@ -0,0 +1,381 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + 
"name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(\n replace(\n replace(\n pipeline().parameters.TaskObject.Source.SQLStatement,\n '',\n string(pipeline().parameters.BatchCount)\n ),\n '',string(pipeline().parameters.Item)),\n '',string(pipeline().parameters.NewWaterMark)\n)" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "AzureSqlSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), 
'\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" 
+ ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": 
"PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWaterMark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": 
"MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load.json new file mode 100644 index 00000000..6737eecd --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load.json @@ -0,0 +1,378 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline 
uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(replace(pipeline().parameters.TaskObject.Source.SQLStatement,'',string(pipeline().parameters.BatchCount)),'',string(pipeline().parameters.Item))" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n 
pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "AzureSqlSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to 
Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + 
"functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to 
Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": 
"2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark.json new file mode 100644 index 00000000..778457dd --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark.json @@ -0,0 +1,381 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": 
"1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(\n replace(\n replace(\n pipeline().parameters.TaskObject.Source.SQLStatement,\n '',\n string(pipeline().parameters.BatchCount)\n ),\n '',string(pipeline().parameters.Item)),\n '',string(pipeline().parameters.NewWaterMark)\n)" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + 
"type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "AzureSqlSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": 
"@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), 
\n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": 
"@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWaterMark": { + "type": "string" + }, + "TaskObject": { + 
"defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_Create_Table.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_Create_Table.json new file mode 100644 index 00000000..1038fcd7 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_Create_Table.json @@ -0,0 +1,285 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "If exist Staging TableName", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@not(empty(pipeline().parameters.TaskObject.Target.TableName))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get SQL Create Statement Staging", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"',string(pipeline().parameters.TaskObject.Target.TableSchema), '\",\"TableName\":\"', 
string(pipeline().parameters.TaskObject.Target.TableName),'\",\"StorageAccountName\":\"', string(pipeline().parameters.TaskObject.Source.System.SystemServer), '\",\"StorageAccountContainer\":\"', string(pipeline().parameters.TaskObject.Source.System.Container), '\",\"RelativePath\":\"', string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), '\",\"SchemaFileName\":\"', string(pipeline().parameters.TaskObject.Source.SchemaFileName), '\"}'))" + }, + "functionName": "GetSQLCreateStatementFromSchema", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get SQL Create Statement Staging", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Create Staging Table", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get SQL Create Statement Staging').output.CreateStatement" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Create Staging Table", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF 
Log - Create Staging Table Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Create Staging Table\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Create Staging Table').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "If exist Target TableName", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@not(empty(pipeline().parameters.TaskObject.Target.StagingTableName))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get SQL Create Statement Target", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"',string(pipeline().parameters.TaskObject.Target.StagingTableSchema), '\",\"TableName\":\"', 
string(pipeline().parameters.TaskObject.Target.StagingTableName),'\",\"StorageAccountName\":\"', string(pipeline().parameters.TaskObject.Source.System.SystemServer), '\",\"StorageAccountContainer\":\"', string(pipeline().parameters.TaskObject.Source.System.Container), '\",\"RelativePath\":\"', string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), '\",\"SchemaFileName\":\"', string(pipeline().parameters.TaskObject.Source.SchemaFileName), '\",\"DropIfExist\":\"True\"}'))" + }, + "functionName": "GetSQLCreateStatementFromSchema", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get SQL Create Statement Target", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Create Target Table", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get SQL Create Statement Target').output.CreateStatement" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Create Target Table", + "dependencyConditions": [ + "Failed" + ] + } + ], 
+ "name": "AF Log - Create Target Table Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Create Target Table\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Create Target Table').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-04T13:09:30Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_Post_Copy.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_Post_Copy.json new file mode 100644 index 00000000..8753f739 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_AzureSqlTable_NA_Post_Copy.json @@ -0,0 +1,582 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + 
"metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "If Exist PostCopySQL", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@not(empty(pipeline().parameters.TaskObject.Target.PostCopySQL))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Run PostCopySQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.PostCopySQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Run PostCopySQL", + "dependencyConditions": 
[ + "Failed" + ] + } + ], + "name": "AF Log - Run PostCopySQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Run PostCopySQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Run PostCopySQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Exist PostCopySQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "If Exist MergeSQL", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@not(empty(pipeline().parameters.TaskObject.Target.MergeSQL))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Run MergeSQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableName" + } 
+ }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.MergeSQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Run MergeSQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Run MergeSQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Run MergeSQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Run MergeSQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Exist PostCopySQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "If AutoGenerateMerge", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@bool(pipeline().parameters.TaskObject.Target.AutoGenerateMerge)" + }, + "ifTrueActivities": [ + { + "dependsOn": [ + { + "activity": "AF Get Merge Statement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Run MergeStatement", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + 
"secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Merge Statement').output.MergeStatement" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL Stage", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Target.StagingTableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Target.StagingTableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + 
"activity": "Lookup Get Metadata Stage", + "dependencyConditions": [ + "Succeeded" + ] + }, + { + "activity": "Lookup Get Metadata Target", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Merge Statement", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId),'\",\"TargetTableSchema\":\"',string(pipeline().parameters.TaskObject.Target.TableSchema),'\",\"TargetTableName\":\"',string(pipeline().parameters.TaskObject.Target.TableName),'\",\"StagingTableSchema\":\"',string(pipeline().parameters.TaskObject.Target.StagingTableSchema),'\",\"StagingTableName\":\"',string(pipeline().parameters.TaskObject.Target.StagingTableName),'\",\"Stage\":', string(activity('Lookup Get Metadata Stage').output.value), ',\"Target\":', string(activity('Lookup Get Metadata Target').output.value),'}'))" + }, + "functionName": "GetSQLMergeStatement", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL Stage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get Metadata Stage", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL Stage').output.InformationSchemaSQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL Target", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get Metadata Target", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL Target').output.InformationSchemaSQL" + }, + "type": "AzureSqlSource" + } + }, + 
"userProperties": [ ] + }, + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL Target", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Target.TableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Target.TableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Run MergeStatement", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Run AutoMerge Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Run AutoMerge\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Run MergeStatement').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup 
Get Metadata Target", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Run Lookup Get Metadata Target Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Lookup Get Metadata Target\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get Metadata Target').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get Metadata Stage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Lookup Get Metadata Stage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Lookup Get Metadata Stage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get Metadata Stage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + 
"userProperties": [ ] + } + ] + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Common')]" + }, + "lastPublishTime": "2020-08-04T13:09:30Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load.json new file mode 100644 index 00000000..ee57a52a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load.json @@ -0,0 +1,390 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": 
"Expression", + "value": "@replace(replace(pipeline().parameters.TaskObject.Source.SQLStatement,'<batchcount>',string(pipeline().parameters.BatchCount)),'<item>',string(pipeline().parameters.Item))" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "SqlServerSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": 
"PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": 
false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + 
"dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": 
"SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark.json new file mode 100644 index 00000000..fe879d00 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark.json @@ -0,0 +1,393 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + 
"metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(\n replace(\n replace(\n pipeline().parameters.TaskObject.Source.SQLStatement,\n '<batchcount>',\n string(pipeline().parameters.BatchCount)\n ),\n '<item>',string(pipeline().parameters.Item)),\n '<newwatermark>',string(pipeline().parameters.NewWaterMark)\n)" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + 
"name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "SqlServerSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', 
string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n 
contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', 
string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWaterMark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + 
"ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load.json new file mode 100644 index 00000000..90dc9bde --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load.json @@ -0,0 +1,390 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(replace(pipeline().parameters.TaskObject.Source.SQLStatement,'',string(pipeline().parameters.BatchCount)),'',string(pipeline().parameters.Item))" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + 
"type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "SqlServerSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n 
'\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), 
'\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + 
"TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark.json new file mode 100644 index 00000000..bb836c1d --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL0_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark.json @@ -0,0 +1,393 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - Copy Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Set SQLStatement", + "type": "SetVariable", + "typeProperties": { + "value": { + "type": "Expression", + "value": "@replace(\n replace(\n replace(\n 
pipeline().parameters.TaskObject.Source.SQLStatement,\n '',\n string(pipeline().parameters.BatchCount)\n ),\n '',string(pipeline().parameters.Item)),\n '',string(pipeline().parameters.NewWaterMark)\n)" + }, + "variableName": "SQLStatement" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Set SQLStatement", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy SQL to Storage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@variables('SQLStatement')" + }, + "type": "SqlServerSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Copy Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy SQL to Storage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": 
"PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Get Parquet Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Get Parquet Metadata').output),\n ',\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Get Parquet Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": 
false, + "timeout": "7.00:00:00" + }, + "type": "GetMetadata", + "typeProperties": { + "dataset": { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@if(equals(pipeline().parameters.TaskObject.Source.ChunkSize,0),\n pipeline().parameters.TaskObject.Target.DataFileName,\n replace(\n pipeline().parameters.TaskObject.Target.DataFileName,\n '.parquet',\n concat('.chunk_', string(pipeline().parameters.Item),'.parquet')\n )\n)" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "fieldList": [ + "structure" + ], + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Pipeline AF Log - Copy Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy SQL to Storage", + 
"dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Copy Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy SQL to Storage\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy SQL to Storage').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Item": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWaterMark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + 
"IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json new file mode 100644 index 00000000..e889fe54 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json @@ -0,0 +1,143 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + 
"type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full Load", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "SH-AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": 
"https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json new file mode 100644 index 00000000..e941c8ce --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json @@ -0,0 +1,178 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + 
"integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "ForEach Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "AF Set New Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TaskMasterId\":\"', string(pipeline().parameters.TaskObject.TaskMasterId),'\",\"TaskMasterWaterMarkColumnType\":\"', string(pipeline().parameters.TaskObject.Source.IncrementalColumnType),'\",\"WaterMarkValue\":\"', string(pipeline().parameters.NewWatermark), '\"}'))" + }, + "FunctionName": "WaterMark", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": 
"Expression", + "value": "@pipeline().parameters.Mapping" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@pipeline().parameters.NewWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWatermark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + 
"SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json new file mode 100644 index 00000000..8f8e34fc --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json @@ -0,0 +1,143 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ 
], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full Load", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "SH-AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + 
"Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json new file mode 100644 index 00000000..1d1c66d5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json @@ -0,0 +1,178 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { 
+ "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "ForEach Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "AF Set New Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TaskMasterId\":\"', string(pipeline().parameters.TaskObject.TaskMasterId),'\",\"TaskMasterWaterMarkColumnType\":\"', string(pipeline().parameters.TaskObject.Source.IncrementalColumnType),'\",\"WaterMarkValue\":\"', string(pipeline().parameters.NewWatermark), '\"}'))" + }, + "FunctionName": "WaterMark", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@pipeline().parameters.NewWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_', 
parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWatermark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 
75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json new file mode 100644 index 00000000..76b59c77 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk.json @@ -0,0 +1,143 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full Load", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + 
"Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "SH-AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": 
"MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json new file mode 100644 index 00000000..7f9408ca --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk.json @@ -0,0 +1,178 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "ForEach Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "AF Set New Watermark", + "type": 
"ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TaskMasterId\":\"', string(pipeline().parameters.TaskObject.TaskMasterId),'\",\"TaskMasterWaterMarkColumnType\":\"', string(pipeline().parameters.TaskObject.Source.IncrementalColumnType),'\",\"WaterMarkValue\":\"', string(pipeline().parameters.NewWatermark), '\"}'))" + }, + "FunctionName": "WaterMark", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@pipeline().parameters.NewWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', 
parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWatermark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json new file mode 100644 index 00000000..36dcb7a4 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk.json @@ -0,0 +1,143 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full Load", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": 
"PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "SH-AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, 
+ "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json new file mode 100644 index 00000000..f44ec921 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL1_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk.json @@ -0,0 +1,178 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "ForEach Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "AF Set New Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', 
string(pipeline().RunId), '\",\"TaskMasterId\":\"', string(pipeline().parameters.TaskObject.TaskMasterId),'\",\"TaskMasterWaterMarkColumnType\":\"', string(pipeline().parameters.TaskObject.Source.IncrementalColumnType),'\",\"WaterMarkValue\":\"', string(pipeline().parameters.NewWatermark), '\"}'))" + }, + "FunctionName": "WaterMark", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "ForEach Chunk", + "type": "ForEach", + "typeProperties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Watermark", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@pipeline().parameters.BatchCount" + }, + "Item": { + "type": "Expression", + "value": "@item()" + }, + "Mapping": { + "type": "Expression", + "value": "@pipeline().parameters.Mapping" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@pipeline().parameters.NewWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "isSequential": true, + "items": { + "type": "Expression", + "value": "@range(1, pipeline().parameters.BatchCount)" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'), '/Components')]" + }, + "parameters": { + "BatchCount": { + "type": "int" + }, + "Mapping": { + "type": "object" + }, + "NewWatermark": { + "type": "string" + }, + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", 
+ "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary.json new file mode 100644 index 00000000..afd55f21 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary.json @@ -0,0 +1,118 @@ +{ + 
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', 
\n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary.json new file mode 100644 index 00000000..df9fddfd --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary.json @@ -0,0 +1,223 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], 
+ "name": "Copy AzureBlobFS to AzureBlobFS", + "outputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "BinarySink" + }, + "source": { + "storeSettings": { + "deleteFilesAfterCompletion": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DeleteAfterCompletion" + }, + "formatSettings": { + "type": "BinaryReadSettings" + }, + "recursive": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Recursively" + }, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + } + }, + "type": "BinarySource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + 
"type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', 
string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary.json new file mode 100644 index 00000000..843a0ddf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + 
"sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute 
Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary.json new file mode 100644 index 00000000..69a62d66 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary.json @@ -0,0 +1,223 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + 
"contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { 
+ "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "BinarySink" + }, + "source": { + "storeSettings": { + "deleteFilesAfterCompletion": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DeleteAfterCompletion" + }, + "formatSettings": { + "type": "BinaryReadSettings" + }, + "recursive": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Recursively" + }, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + } + }, + "type": "BinarySource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', 
string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').output.filesWritten), 
'\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..fd537022 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + 
"type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n 
string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..8ad2eb7f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,233 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this 
pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', 
parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "enablePartitionDiscovery": false, + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "maxConcurrentConnections": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), 
'\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').output.filesWritten), 
'\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..f36fe9dc --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": 
"Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n 
'\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..46b2a1a8 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,233 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short 
name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": 
"[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "enablePartitionDiscovery": false, + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "maxConcurrentConnections": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to 
Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', 
string(activity('Copy AzureBlobFS to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json new file mode 100644 index 00000000..33a0372a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + 
"activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n 
pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..37fb3f3b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary.json @@ -0,0 +1,338 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + 
"integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + 
"inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": 
"autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "storeSettings": { + "enablePartitionDiscovery": false, + "maxConcurrentConnections": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DynamicMapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + 
"dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..aeed462e --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": 
"ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + 
}, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary.json new file mode 100644 index 00000000..e1d5851d --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary.json @@ -0,0 +1,226 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + 
}, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobFSWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', 
string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..ccb0177c --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": 
"Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..d1a384a0 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,217 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Failed", + 
"type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', 
string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..7c075177 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The 
short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + 
"waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary.json new file mode 100644 index 00000000..f0e03d9a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary.json @@ 
-0,0 +1,226 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage", + "outputs": [ + { + 
"parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobStorageWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', 
string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..797e5b41 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": 
[ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + 
"type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..7f5f6fe6 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,217 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + 
"type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + 
"StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": 
"SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + 
} + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA.json new file mode 100644 index 00000000..3bdb5e4b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary_', 
parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n 
string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..a8db4019 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary.json @@ -0,0 +1,322 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Excel_AzureSqlTable_NA_Primary_', 
parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + 
"referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "SheetName": { + 
"type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ExcelSource" + }, + "translator": { + "type": "Expression", + "value": 
"@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to 
SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA.json new file mode 100644 index 00000000..4e2b95c7 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + 
"type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Json_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + 
"type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..bdf06492 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary.json @@ -0,0 
+1,316 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": 
"@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n 
string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Json_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "type": "JsonReadSettings" + }, + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "JsonSource" + }, + "translator": { + "type": "Expression", + "value": "@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", 
+ "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..2b6fcbd4 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n 
'\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": 
"Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary.json new file mode 100644 index 00000000..22fdbf40 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary.json @@ -0,0 +1,218 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + 
"StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobFSWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ParquetSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] 
+ } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": 
"Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..2119ff26 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": 
"String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + 
"referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary.json new file mode 100644 index 00000000..c8eb6d88 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary.json @@ -0,0 +1,218 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobStorageWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobFSReadSettings" + }, + "type": "ParquetSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', 
string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobFS to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobFS to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobFS to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', 
string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobFS to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA.json new file mode 100644 index 00000000..a2fdce05 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), 
'/','GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", 
+ "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..ce1fddad --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary.json @@ -0,0 +1,326 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", 
+ "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobFS_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + 
"inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "type": "ParquetReadSettings" + }, + 
"storeSettings": { + "enablePartitionDiscovery": false, + "recursive": false, + "type": "AzureBlobFSReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@concat(\n replace(\n pipeline().parameters.TaskObject.Source.DataFileName,\n '.parquet',\n ''\n ),\n '*.parquet'\n)" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "ParquetSource" + }, + "translator": { + "type": "Expression", + "value": "@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + 
"Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary.json new file mode 100644 index 
00000000..dff71093 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n 
string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + 
"type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary.json new file mode 100644 index 00000000..b2f6cd33 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary.json @@ -0,0 +1,223 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobFS_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + 
"type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS", + "outputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "BinarySink" + }, + "source": { + "storeSettings": { + "deleteFilesAfterCompletion": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DeleteAfterCompletion" + }, + "formatSettings": { + "type": "BinaryReadSettings" + }, + "recursive": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Recursively" + }, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + } + }, + "type": "BinarySource" + } + }, + "userProperties": [ ] + }, + { + 
"dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy 
AzureBlobStorage to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json new file mode 100644 index 00000000..d320aea5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + 
"type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n 
string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary.json 
new file mode 100644 index 00000000..196abb3e --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary.json @@ -0,0 +1,223 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Binary_AzureBlobStorage_Binary_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "Directory": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "Directory": 
{ + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "File": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FileSystem": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Binary_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "BinarySink" + }, + "source": { + "storeSettings": { + "deleteFilesAfterCompletion": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DeleteAfterCompletion" + }, + "formatSettings": { + "type": "BinaryReadSettings" + }, + "recursive": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Recursively" + }, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + } + }, + "type": "BinarySource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": 
"@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', 
string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..0b77f6a1 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": 
"String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": 
[ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..f7716074 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,233 @@ +{ + "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + 
"RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "enablePartitionDiscovery": false, + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "maxConcurrentConnections": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Failed", + "type": 
"ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": 
"@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..c828662b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": 
"String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + 
"referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..b4ee8501 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,233 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], 
+ "name": "Copy AzureBlobStorage to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "enablePartitionDiscovery": false, + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "maxConcurrentConnections": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + } + }, + "userProperties": [ ] + 
}, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" 
+ ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json new file mode 100644 index 00000000..c8babcaf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": 
"The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n 
string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary.json 
new file mode 100644 index 00000000..339bcefc --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary.json @@ -0,0 +1,338 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_DelimitedText_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + 
"Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n 
string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": 
"[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "skipLineCount": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SkipLineCount" + }, + "type": "DelimitedTextReadSettings" + }, + "storeSettings": { + "enablePartitionDiscovery": false, + "maxConcurrentConnections": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.MaxConcorrentConnections" + }, + "recursive": true, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "DelimitedTextSource" + }, + "translator": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DynamicMapping" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', 
string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": 
"[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..82f6a770 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + 
"parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n 
'\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary.json new file mode 100644 index 00000000..9dd18d2f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary.json @@ -0,0 +1,226 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + 
"sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + 
"StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobFSWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + 
"type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + 
"annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..e9d89967 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary_', 
parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n 
string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..92456950 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,217 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobFS_Parquet_Primary_', 
parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" 
+ }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobFSWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), 
'\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..59170429 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n 
concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": 
"[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary.json new file mode 100644 index 00000000..1cb0e972 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary.json @@ -0,0 +1,226 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + 
"FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + 
"copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobStorageWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', 
string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json new file mode 
100644 index 00000000..50677e09 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), 
\n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": 
"object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..5290a3ea --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,217 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "type": "AzureBlobStorageWriteSettings" + }, + "type": "ParquetSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ExcelSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + 
"activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy 
AzureBlobStorage to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA.json new file mode 100644 index 00000000..1a439be0 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + 
}, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n 
)\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..a860edb5 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary.json @@ -0,0 +1,322 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Excel_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema 
File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n 
contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "SheetName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.SheetName" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Excel_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": 
"[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ExcelSource" + }, + "translator": { + "type": "Expression", + "value": "@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + 
"type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] 
+} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA.json new file mode 100644 index 00000000..e8ee0d9f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Json_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + 
"typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + 
"waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..c5745a8a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary.json @@ -0,0 +1,317 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Json_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', 
string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": 
"Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Json_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + 
"parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "type": "JsonReadSettings" + }, + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "JsonSource" + }, + "translator": { + "type": "Expression", + "value": "@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', 
string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json new file mode 100644 index 00000000..154c39d5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + 
"name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n 
'\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary.json new file mode 100644 index 00000000..2c9e25c2 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary.json @@ -0,0 +1,218 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short 
name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureBlobFS_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": 
"[concat('GDS_AzureBlobFS_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobFSWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ParquetSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to 
AzureBlobFS Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobFS", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobFS Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobFS').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": 
"2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json new file mode 100644 index 00000000..a3c684f0 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + 
"waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n 
'\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary.json new file mode 100644 index 00000000..ec908ca5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary.json @@ -0,0 +1,218 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), 
'/','GPL_AzureBlobStorage_Parquet_AzureBlobStorage_DelimitedText_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage Started", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage", + "outputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.DataFileName" + }, + "FirstRowAsHeader": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.FirstRowAsHeader" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.RelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_DelimitedText_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + 
"type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "storeSettings": { + "copyBehavior": "PreserveHierarchy", + "formatSettings": { + "fileExtension": ".txt", + "quoteAllText": true, + "type": "DelimitedTextWriteSettings" + }, + "type": "AzureBlobStorageWriteSettings" + }, + "type": "DelimitedTextSink" + }, + "source": { + "storeSettings": { + "recursive": true, + "type": "AzureBlobStorageReadSettings" + }, + "type": "ParquetSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Started", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), 
'\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy AzureBlobStorage to AzureBlobStorage", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Copy AzureBlobStorage to AzureBlobStorage Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy Blob to Blob\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy AzureBlobStorage to AzureBlobStorage').output.filesWritten), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-05T04:14:00Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA.json new file mode 100644 index 00000000..91f65897 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + 
"typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + 
"waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary.json new file mode 100644 index 00000000..3a19cd41 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary.json @@ -0,0 +1,326 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureBlobStorage_Parquet_AzureSqlTable_NA_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Pipeline AF Log - ADLS to Azure SQL Start", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', 
string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":3,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"Status\":\"Started\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Start", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "description": "Auto Creates Table Using a Schema File", + "name": "If Auto Create Table", + "type": "IfCondition", + "typeProperties": { + "expression": { + "type": "Expression", + "value": "@and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable))" + }, + "ifTrueActivities": [ + { + "dependsOn": [ ], + "name": "Execute Create Table", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Create_Table_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": 
"Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Source.System.SystemServer), \n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Source.SchemaFileName), \n '\",\"SourceType\":\"', \n if(\n contains(string(pipeline().parameters.TaskObject.Source.System.SystemServer),'.dfs.core.windows.net'),\n 'ADLS',\n 'Azure Blob'\n ), \n '\",\"TargetType\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Type), \n '\",\"MetadataType\":\"Parquet\"}')\n)" + }, + "functionName": "GetSourceTargetMapping", + "method": "POST" + }, + "userProperties": [ ] + } + ] + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "If Auto Create Table", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "inputs": [ + { + "parameters": { + "FileName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.DataFileName" + }, + "RelativePath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + }, + "StorageAccountContainerName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Container" + }, + "StorageAccountEndpoint": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + } + }, + "referenceName": "[concat('GDS_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "name": "Copy to SQL", + "outputs": [ + { + 
"parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Target.StagingTableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + } + ], + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Copy", + "typeProperties": { + "enableStaging": false, + "parallelCopies": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.DegreeOfCopyParallelism" + }, + "sink": { + "disableMetricsCollection": false, + "preCopyScript": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Target.PreCopySQL}" + }, + "tableOption": "autoCreate", + "type": "AzureSqlSink" + }, + "source": { + "formatSettings": { + "type": "AzureBlobStorageReadSettings" + }, + "storeSettings": { + "enablePartitionDiscovery": false, + "recursive": true, + "type": "AzureBlobStorageReadSettings", + "wildcardFileName": { + "type": "Expression", + "value": "@concat(\n replace(\n pipeline().parameters.TaskObject.Source.DataFileName,\n '.parquet',\n ''\n ),\n '*.parquet'\n)" + }, + "wildcardFolderPath": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.Instance.SourceRelativePath" + } + }, + "type": "ParquetSource" + }, + "translator": { + "type": "Expression", + "value": "@if(and(not(equals(coalesce(pipeline().parameters.TaskObject.Source.SchemaFileName,''),'')),bool(pipeline().parameters.TaskObject.Target.AutoCreateTable)),activity('AF Get Mapping').output.value, null)" + } + }, + 
"userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Copy to SQL').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Copy to SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Copy to SQL\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"RowsInserted\":\"', string(activity('Copy to SQL').output.rowsCopied), '\",\"Comment\":\"\",\"Status\":\"Complete\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + 
"waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Pipeline AF Log - ADLS to Azure SQL Succeed", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute AZ_SQL_Post-Copy", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_Post_Copy_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-07-29T09:43:40Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..8760c73b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the 
shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + 
], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..dff95779 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,551 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name 
of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Source.TableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Source.TableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get SQL Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + 
"value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL').output.InformationSchemaSQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Get Metadata Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Get Metadata\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get SQL Metadata').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + 
"referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Metadata and Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"',\n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Lookup Get SQL Metadata').output),\n ',\"MetadataType\":\"SQL\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Persist Metadata and Get Mapping", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Switch Load Type", + "type": "Switch", + "typeProperties": { + "cases": [ + { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full_Load Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist 
Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "value": "Full" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } 
+ }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Full Load Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + 
}, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Full_Chunk" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark and Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWatermark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobFS_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark and Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": 
"Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark_Chunk" + } + ], + "on": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.IncrementalType" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-04T12:40:45Z", + "parameters": { + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + 
"RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..be74a9e4 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_', 
parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n 
'{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..a1698575 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,551 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": 
"String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Source.TableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Source.TableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get SQL Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL').output.InformationSchemaSQL" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Get Metadata Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Get Metadata\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get SQL Metadata').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Metadata and Get Mapping", 
+ "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"',\n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Lookup Get SQL Metadata').output),\n ',\"MetadataType\":\"SQL\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Persist Metadata and Get Mapping", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Switch Load Type", + "type": "Switch", + "typeProperties": { + "cases": [ + { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full_Load Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + 
} + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "value": "Full" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": 
"DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Full Load Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + 
"type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Full_Chunk" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark and Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWatermark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_AzureSqlTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark and Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "Schema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "Table": { + 
"type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + } + }, + "referenceName": "[concat('GDS_AzureSqlTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "AzureSqlSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark_Chunk" + } + ], + "on": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.IncrementalType" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-04T12:40:45Z", + "parameters": { + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + 
"SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet.json new file mode 100644 index 00000000..d0a6d554 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", 
+ "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n 
'\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary.json new file mode 100644 index 00000000..8972156a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary.json @@ -0,0 +1,599 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri 
of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Source.TableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Source.TableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get SQL Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": 
"Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL').output.InformationSchemaSQL" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Get Metadata Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Get Metadata\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get SQL Metadata').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + 
"linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Metadata and Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"',\n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Lookup Get SQL Metadata').output),\n ',\"MetadataType\":\"SQL\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Persist Metadata and Get Mapping", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Switch Load Type", + "type": "Switch", + "typeProperties": { + "cases": [ + { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full_Load Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": 
"@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "value": "Full" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Full Load Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": 
"Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Full_Chunk" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark and Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWatermark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobFS_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark and Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark_Chunk" + } + ], + "on": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.IncrementalType" + } + }, + 
"userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-04T12:40:45Z", + "parameters": { + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + "Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git 
a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet.json new file mode 100644 index 00000000..a6daa874 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet.json @@ -0,0 +1,118 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Main Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "Pipeline AF Log - Failed", + "type": "ExecutePipeline", + 
"typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"', \n string(activity('Execute Main Pipeline').error.message), \n '\",\"Status\":\"Failed\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Execute Main Pipeline", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Pipeline AF Log - Succeed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(\n concat(\n '{\"TaskInstanceId\":\"', \n string(\n pipeline().parameters.TaskObject.TaskInstanceId\n ), \n '\",\"ExecutionUid\":\"', \n string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', \n string(pipeline().RunId), \n '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Data-Movement-Master\",\"StartDateTimeOffSet\":\"', \n string(pipeline().TriggerTime), \n '\",\"EndDateTimeOffSet\":\"', \n string(utcnow()), \n '\",\"Comment\":\"\",\"Status\":\"Complete\",\"NumberOfRetries\":\"', \n string(pipeline().parameters.TaskObject.NumberOfRetries),\n '\"}'\n )\n)" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + 
"waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-06T06:27:14Z", + "parameters": { + "TaskObject": { + "type": "object" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary.json b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary.json new file mode 100644 index 00000000..14b31d48 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/arm/GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary.json @@ -0,0 +1,599 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "dataFactoryName": { + "metadata": "The name of the data factory", + "type": "String" + }, + "integrationRuntimeName": { + "metadata": "The name of the integration runtime this pipeline uses", + "type": "String" + }, + "integrationRuntimeShortName": { + "metadata": "The short name of the integration runtime this pipeline uses", + "type": "String" + }, + "sharedKeyVaultUri": { + "metadata": "The uri of the shared KeyVault", + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2018-06-01", + "name": "[concat(parameters('dataFactoryName'), '/','GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Primary_', parameters('integrationRuntimeShortName'))]", + "properties": { + "activities": [ + { + "dependsOn": [ ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Get Information Schema SQL", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + 
"secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"TableSchema\":\"', string(pipeline().parameters.TaskObject.Source.TableSchema), '\",\"TableName\":\"', string(pipeline().parameters.TaskObject.Source.TableName),'\"}'))" + }, + "functionName": "GetInformationSchemaSQL", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Get Information Schema SQL", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Lookup Get SQL Metadata", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + 
"type": "DatasetReference" + }, + "firstRowOnly": false, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@activity('AF Get Information Schema SQL').output.InformationSchemaSQL" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Failed" + ] + } + ], + "name": "AF Log - Get Metadata Failed", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "Body": { + "type": "Expression", + "value": "@json(concat('{\"TaskInstanceId\":\"', string(pipeline().parameters.TaskObject.TaskInstanceId), '\",\"ExecutionUid\":\"', string(pipeline().parameters.TaskObject.ExecutionUid), '\",\"RunId\":\"', string(pipeline().RunId), '\",\"LogTypeId\":1,\"LogSource\":\"ADF\",\"ActivityType\":\"Get Metadata\",\"StartDateTimeOffSet\":\"', string(pipeline().TriggerTime), '\",\"EndDateTimeOffSet\":\"', string(utcnow()), '\",\"Comment\":\"', string(activity('Lookup Get SQL Metadata').error.message), '\",\"Status\":\"Failed\"}'))" + }, + "FunctionName": "Log", + "Method": "Post" + }, + "pipeline": { + "referenceName": "SPL_AzureFunction", + "type": "PipelineReference" + }, + "waitOnCompletion": false + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "Lookup Get SQL Metadata", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "linkedServiceName": { + "referenceName": "SLS_AzureFunctionApp", + "type": "LinkedServiceReference" + }, + "name": "AF Persist Metadata and Get Mapping", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "7.00:00:00" + }, + "type": "AzureFunctionActivity", + "typeProperties": { + "body": { + "type": "Expression", + "value": "@json(\n concat('{\"TaskInstanceId\":\"',\n string(pipeline().parameters.TaskObject.TaskInstanceId), \n '\",\"ExecutionUid\":\"', \n 
string(pipeline().parameters.TaskObject.ExecutionUid), \n '\",\"RunId\":\"', string(pipeline().RunId), \n '\",\"StorageAccountName\":\"', \n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '\",\"StorageAccountContainer\":\"', \n string(pipeline().parameters.TaskObject.Target.System.Container), \n '\",\"RelativePath\":\"', \n string(pipeline().parameters.TaskObject.Target.Instance.TargetRelativePath), \n '\",\"SchemaFileName\":\"', \n string(pipeline().parameters.TaskObject.Target.SchemaFileName), \n '\",\"SourceType\":\"', \n string(pipeline().parameters.TaskObject.Source.System.Type), \n '\",\"TargetType\":\"', \n if(\n contains(\n string(pipeline().parameters.TaskObject.Target.System.SystemServer),\n '.dfs.core.windows.net'\n ),\n 'ADLS',\n 'Azure Blob'), \n '\",\"Data\":',\n string(activity('Lookup Get SQL Metadata').output),\n ',\"MetadataType\":\"SQL\"}')\n)" + }, + "functionName": "TaskExecutionSchemaFile", + "method": "POST" + }, + "userProperties": [ ] + }, + { + "dependsOn": [ + { + "activity": "AF Persist Metadata and Get Mapping", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Switch Load Type", + "type": "Switch", + "typeProperties": { + "cases": [ + { + "activities": [ + { + "dependsOn": [ ], + "name": "Execute Full_Load Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + } + ], + "value": "Full" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark", + 
"dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": "1", + "Item": "1", + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWaterMark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": 
"DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Full Load Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Full_Load_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + "typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": 
"@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Full_Chunk" + }, + { + "activities": [ + { + "dependsOn": [ + { + "activity": "Lookup New Watermark and Chunk", + "dependencyConditions": [ + "Succeeded" + ] + } + ], + "name": "Execute Watermark Chunk Pipeline", + "type": "ExecutePipeline", + "typeProperties": { + "parameters": { + "BatchCount": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.batchcount" + }, + "Mapping": { + "type": "Expression", + "value": "@activity('AF Persist Metadata and Get Mapping').output.value" + }, + "NewWatermark": { + "type": "Expression", + "value": "@activity('Lookup New Watermark and Chunk').output.firstRow.newWatermark" + }, + "TaskObject": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject" + } + }, + "pipeline": { + "referenceName": "[concat('GPL_SqlServerTable_NA_AzureBlobStorage_Parquet_Watermark_Chunk_', parameters('integrationRuntimeShortName'))]", + "type": "PipelineReference" + }, + "waitOnCompletion": true + }, + "userProperties": [ ] + }, + { + "dependsOn": [ ], + "name": "Lookup New Watermark and Chunk", + "policy": { + "retry": 0, + "retryIntervalInSeconds": 30, + "secureInput": false, + "secureOutput": false, + "timeout": "0.00:30:00" + }, + "type": "Lookup", + 
"typeProperties": { + "dataset": { + "parameters": { + "Database": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Database" + }, + "KeyVaultBaseUrl": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.KeyVaultBaseUrl" + }, + "PasswordSecret": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.PasswordKeyVaultSecretName" + }, + "Server": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.SystemServer" + }, + "TableName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableName" + }, + "TableSchema": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.TableSchema" + }, + "UserName": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.System.Username" + } + }, + "referenceName": "[concat('GDS_SqlServerTable_NA_', parameters('integrationRuntimeShortName'))]", + "type": "DatasetReference" + }, + "firstRowOnly": true, + "source": { + "partitionOption": "None", + "queryTimeout": "02:00:00", + "sqlReaderQuery": { + "type": "Expression", + "value": "@{pipeline().parameters.TaskObject.Source.IncrementalSQLStatement}" + }, + "type": "SqlServerSource" + } + }, + "userProperties": [ ] + } + ], + "value": "Watermark_Chunk" + } + ], + "on": { + "type": "Expression", + "value": "@pipeline().parameters.TaskObject.Source.IncrementalType" + } + }, + "userProperties": [ ] + } + ], + "annotations": [ ], + "folder": { + "name": "[concat('ADS Go Fast/Data Movement/', parameters('integrationRuntimeShortName'))]" + }, + "lastPublishTime": "2020-08-04T12:40:45Z", + "parameters": { + "TaskObject": { + "defaultValue": { + "DataFactory": { + "ADFPipeline": "AZ_SQL_AZ_Storage_Parquet_Azure", + "Id": 1, + "Name": "adsgofastdatakakeacceladf", + "ResourceGroup": "AdsGoFastDataLakeAccel", + "SubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + "DegreeOfCopyParallelism": 1, + 
"Enabled": 1, + "ExecutionUid": "2c5924ee-b855-4d2b-bb7e-4f5dde4c4dd3", + "KeyVaultBaseUrl": "https://adsgofastkeyvault.vault.azure.net/", + "NumberOfRetries": 111, + "ScheduleMasterId": 2, + "Source": { + "Database": { + "AuthenticationType": "MSI", + "Name": "AWSample", + "SystemName": "adsgofastdatakakeaccelsqlsvr.database.windows.net" + }, + "Extraction": { + "FullOrIncremental": "Full", + "IncrementalType": null, + "TableName": "SalesOrderHeader", + "TableSchema": "SalesLT", + "Type": "Table" + }, + "Type": "Azure SQL" + }, + "Target": { + "DataFileName": "SalesLT.SalesOrderHeader.parquet", + "FirstRowAsHeader": null, + "MaxConcorrentConnections": null, + "RelativePath": "/AwSample/SalesLT/SalesOrderHeader/2020/7/9/14/12/", + "SchemaFileName": "SalesLT.SalesOrderHeader", + "SheetName": null, + "SkipLineCount": null, + "StorageAccountAccessMethod": "MSI", + "StorageAccountContainer": "datalakeraw", + "StorageAccountName": "https://adsgofastdatalakeaccelst.blob.core.windows.net", + "Type": "Azure Blob" + }, + "TaskGroupConcurrency": 10, + "TaskGroupPriority": 0, + "TaskInstanceId": 75, + "TaskMasterId": 12, + "TaskStatus": "Untried", + "TaskType": "SQL Database to Azure Storage" + }, + "type": "object" + } + }, + "variables": { + "SQLStatement": { + "type": "String" + } + } + }, + "type": "Microsoft.DataFactory/factories/pipelines" + } + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/main.tf b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/main.tf new file mode 100644 index 00000000..8369a941 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/main.tf @@ -0,0 +1,81 @@ +resource "azurerm_resource_group_template_deployment" "sh_pipelines_level_0" { + for_each = { + for pipeline in fileset(path.module, "arm/GPL0_*.json"): + pipeline => pipeline + } + name = substr(sha256("${replace(replace(each.value, ".json", ""), "arm/", 
"")}_${var.integration_runtime_short_name}_${var.name_suffix}"), 0,30) + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "dataFactoryName" = { + value = var.data_factory_name + }, + "integrationRuntimeName" = { + value = var.integration_runtime_name + }, + "integrationRuntimeShortName" = { + value = var.integration_runtime_short_name + }, + "sharedKeyVaultUri" = { + value = var.shared_keyvault_uri + }, + }) + template_content = file("${path.module}/${each.value}") +} + +resource "azurerm_resource_group_template_deployment" "sh_pipelines_level_1" { + for_each = { + for pipeline in fileset(path.module, "arm/GPL1_*.json"): + pipeline => pipeline + } + name = substr(sha256("${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}"), 0,30) + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "dataFactoryName" = { + value = var.data_factory_name + }, + "integrationRuntimeName" = { + value = var.integration_runtime_name + }, + "integrationRuntimeShortName" = { + value = var.integration_runtime_short_name + }, + "sharedKeyVaultUri" = { + value = var.shared_keyvault_uri + }, + }) + template_content = file("${path.module}/${each.value}") + depends_on = [ + azurerm_resource_group_template_deployment.sh_pipelines_level_0 + ] +} + +resource "azurerm_resource_group_template_deployment" "sh_pipelines" { + for_each = { + for pipeline in fileset(path.module, "arm/GPL_*.json"): + pipeline => pipeline + } + name = substr(sha256("${replace(replace(each.value, ".json", ""), "arm/", "")}_${var.integration_runtime_short_name}_${var.name_suffix}"), 0,30) + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "dataFactoryName" = { + value = var.data_factory_name + }, + "integrationRuntimeName" = { + value = var.integration_runtime_name 
+ }, + "integrationRuntimeShortName" = { + value = var.integration_runtime_short_name + }, + "sharedKeyVaultUri" = { + value = var.shared_keyvault_uri + }, + }) + template_content = file("${path.module}/${each.value}") + depends_on = [ + azurerm_resource_group_template_deployment.sh_pipelines_level_0, + azurerm_resource_group_template_deployment.sh_pipelines_level_1 + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/outputs.tf b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/outputs.tf new file mode 100644 index 00000000..e69de29b diff --git a/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/vars.tf b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/vars.tf new file mode 100644 index 00000000..548a6dc9 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/data_factory_pipelines_selfhosted/vars.tf @@ -0,0 +1,28 @@ +variable "resource_group_name" { + description = "The name of the resource group to deploy into" + type = string +} + +variable "data_factory_name" { + description = "The name of the data factory" + type = string +} +variable "shared_keyvault_uri" { + description = "The uri of the shared keyvault" + type = string +} + + +variable "integration_runtime_name" { + description = "The name of the integration runtime" + type = string +} + +variable "integration_runtime_short_name" { + description = "The short name of the integration runtime" + type = string +} +variable "name_suffix" { + description = "Used to give resource group deployments unique names for an environment" + type = string +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/arm/privatelinks.json b/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/arm/privatelinks.json new file mode 100644 index 00000000..5ffeaed2 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/arm/privatelinks.json @@ -0,0 +1,355 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "subscriptionId" : { + "type" : "string" + }, + "purviewAccountName": { + "type": "String" + }, + "location": { + "type": "String" + }, + "queuePrivateLinkName": { + "type": "String" + }, + "storagePrivateLinkName": { + "type": "String" + }, + "eventHubPrivateLinkName": { + "type": "String" + }, + "resourceGroupName" : { + "type" : "string" + }, + "managedResourceGroupName": { + "type": "String" + }, + "subnetId": { + "type": "String" + }, + "queueDnsId": { + "type": "String" + }, + "storageDnsId": { + "type": "String" + }, + "serviceBusDnsId": { + "type": "String" + } + }, + "variables": {}, + "resources": [ + { + "type": "Microsoft.Purview/accounts", + "apiVersion": "2021-07-01", + "name": "[parameters('purviewAccountName')]", + "location": "[parameters('location')]", + "dependsOn": [], + "tags": {}, + "sku": { + "name": "Standard", + "capacity": 1 + }, + "identity": { + "type": "SystemAssigned" + }, + "properties": { + "managedResourceGroupName": "[parameters('managedResourceGroupName')]" + } + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "blob_1df2c831-46e3-41fd-831f-9c8ede5a9040", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "parameters": {}, + "template": { + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": {}, + "variables": {}, + "resources": [ + { + "apiVersion": "2020-03-01", + "name": "purview-blob", + "type": "Microsoft.Network/privateEndpoints", + "location": "[parameters('location')]", + "properties": { + "privateLinkServiceConnections": [ + { + "name": "purview-blob", + "properties": { + "privateLinkServiceId": 
"[reference(parameters('purviewAccountName')).managedResources.storageAccount]", + "groupIds": [ + "blob" + ] + } + } + ], + "subnet": { + "id": "[parameters('subnetId')]" + } + }, + "tags": {} + } + ], + "outputs": {} + } + }, + "resources": [], + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "queue_1df2c831-46e3-41fd-831f-9c8ede5a9043", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "parameters": {}, + "template": { + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": {}, + "variables": {}, + "resources": [ + { + "apiVersion": "2020-03-01", + "name": "purview-queue", + "type": "Microsoft.Network/privateEndpoints", + "location": "[parameters('location')]", + "properties": { + "privateLinkServiceConnections": [ + { + "name": "purview-queue", + "properties": { + "privateLinkServiceId": "[reference(parameters('purviewAccountName')).managedResources.storageAccount]", + "groupIds": [ + "queue" + ] + } + } + ], + "subnet": { + "id": "[parameters('subnetId')]" + } + }, + "tags": {} + } + ], + "outputs": {} + } + }, + "resources": [], + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "namespace_1df2c831-46e3-41fd-831f-9c8ede5a9046", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "parameters": {}, + "template": { + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": {}, + "variables": {}, + "resources": [ + { + "apiVersion": "2020-03-01", + "name": "purview-namespace", + "type": "Microsoft.Network/privateEndpoints", + "location": "[parameters('location')]", + 
"properties": { + "privateLinkServiceConnections": [ + { + "name": "purview-namespace", + "properties": { + "privateLinkServiceId": "[reference(parameters('purviewAccountName')).managedResources.eventHubNamespace]", + "groupIds": [ + "namespace" + ] + } + } + ], + "subnet": { + "id": "[parameters('subnetId')]" + } + }, + "tags": {} + } + ], + "outputs": {} + } + }, + "resources": [], + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "PrivateDns-blob-1df2c83146e341fd831f9c8ede5a9041", + "dependsOn": [ + "blob_1df2c831-46e3-41fd-831f-9c8ede5a9040" + ], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "apiVersion": "2017-05-10", + "name": "DnsZoneGroup-1df2c83146e341fd831f9c8ede5a9041", + "type": "Microsoft.Resources/deployments", + "resourceGroup": "[parameters('resourceGroupName')]", + "subscriptionId": "[parameters('subscriptionId')]", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "type": "Microsoft.Network/privateEndpoints/privateDnsZoneGroups", + "apiVersion": "2020-03-01", + "name": "[concat('purview-blob', '/', 'default')]", + "location": "[parameters('location')]", + "properties": { + "privateDnsZoneConfigs": [ + { + "name": "privatelink-blob-core-windows-net", + "properties": { + "privateDnsZoneId": "[parameters('storageDnsId')]" + } + } + ] + } + } + ] + } + } + } + ] + } + }, + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + 
"name": "PrivateDns-queue-1df2c83146e341fd831f9c8ede5a9044", + "dependsOn": [ + "queue_1df2c831-46e3-41fd-831f-9c8ede5a9043" + ], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "apiVersion": "2017-05-10", + "name": "DnsZoneGroup-1df2c83146e341fd831f9c8ede5a9044", + "type": "Microsoft.Resources/deployments", + "resourceGroup": "[parameters('resourceGroupName')]", + "subscriptionId": "[parameters('subscriptionId')]", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "type": "Microsoft.Network/privateEndpoints/privateDnsZoneGroups", + "apiVersion": "2020-03-01", + "name": "[concat('purview-queue', '/', 'default')]", + "location": "[parameters('location')]", + "properties": { + "privateDnsZoneConfigs": [ + { + "name": "privatelink-queue-core-windows-net", + "properties": { + "privateDnsZoneId": "[parameters('queueDnsId')]" + } + } + ] + } + } + ] + } + } + } + ] + } + }, + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "PrivateDns-servicebus-1df2c83146e341fd831f9c8ede5a9047", + "dependsOn": [ + "namespace_1df2c831-46e3-41fd-831f-9c8ede5a9046" + ], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "apiVersion": "2017-05-10", + "name": "DnsZoneGroup-1df2c83146e341fd831f9c8ede5a9047", + "type": "Microsoft.Resources/deployments", + "resourceGroup": "[parameters('resourceGroupName')]", + "subscriptionId": 
"[parameters('subscriptionId')]", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "type": "Microsoft.Network/privateEndpoints/privateDnsZoneGroups", + "apiVersion": "2020-03-01", + "name": "[concat('purview-namespace', '/', 'default')]", + "location": "[parameters('location')]", + "properties": { + "privateDnsZoneConfigs": [ + { + "name": "privatelink-servicebus-windows-net", + "properties": { + "privateDnsZoneId": "[parameters('serviceBusDnsId')]" + } + } + ] + } + } + ] + } + } + } + ] + } + }, + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + } + ], + "outputs": {} +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/main.tf b/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/main.tf new file mode 100644 index 00000000..b333a712 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/main.tf @@ -0,0 +1,44 @@ +resource "azurerm_resource_group_template_deployment" "ingestion_private_endpoints" { + name = "purview_ingestion_private_endpoints_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "purviewAccountName" = { + value = var.purview_account_name + }, + "subscriptionId" = { + value = var.subscription_id + }, + "location" = { + value = var.resource_location + }, + "queuePrivateLinkName" = { + value = var.queue_privatelink_name + }, + "storagePrivateLinkName" = { + value = var.storage_privatelink_name + }, + "eventHubPrivateLinkName" = { + value = var.eventhub_privatelink_name + }, + "subnetId" = { + value = var.subnet_id + }, + "managedResourceGroupName" = { + value = 
var.managed_resource_group_name + }, + "resourceGroupName" = { + value = var.resource_group_name + }, + "queueDnsId" = { + value = var.queue_private_dns_id + }, + "storageDnsId" = { + value = var.blob_private_dns_id + }, + "serviceBusDnsId" = { + value = var.servicebus_private_dns_id + } + }) + template_content = file("${path.module}/arm/privatelinks.json") +} diff --git a/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/outputs.tf b/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/outputs.tf new file mode 100644 index 00000000..e69de29b diff --git a/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/vars.tf b/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/vars.tf new file mode 100644 index 00000000..59c213f7 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/modules/purview_ingestion_private_endpoints/vars.tf @@ -0,0 +1,62 @@ +variable "resource_group_name" { + description = "The name of the resource group to deploy into" + type = string +} + +variable "purview_account_name" { + description = "The name of the data factory" + type = string +} +variable "resource_location" { + description = "The uri of the shared keyvault" + type = string +} + +variable "subscription_id" { + description = "The Id of the azure sub" + type = string +} + +variable "queue_privatelink_name" { + description = "The name of the queue private link" + type = string +} + +variable "storage_privatelink_name" { + description = "The name of the storage private link" + type = string +} + +variable "eventhub_privatelink_name" { + description = "The name of the eventhub private link" + type = string +} + +variable "queue_private_dns_id" { + description = "The id of the queue private DNS" + type = string +} + +variable "blob_private_dns_id" { + description = "The id of the queue private DNS" + type = string +} + +variable "servicebus_private_dns_id" { + 
description = "The id of the queue private DNS" + type = string +} + +variable "subnet_id" { + description = "The id of the subnet to attach the purview ingestion resources" + type = string +} +variable "managed_resource_group_name" { + description = "The name of the purview managed resource group" + type = string +} +variable "name_suffix" { + description = "Used to give resource group deployments unique names for an environment" + type = string +} + diff --git a/solution/DeploymentV2/terraform_layer2/nsg_app_service.tf b/solution/DeploymentV2/terraform_layer2/nsg_app_service.tf new file mode 100644 index 00000000..2c0c2421 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/nsg_app_service.tf @@ -0,0 +1,53 @@ +resource "azurerm_network_security_group" "app_service_nsg" { + count = (var.is_vnet_isolated && var.existing_app_service_subnet_id == "" ? 1 : 0) + name = local.app_service_nsg_name + location = var.resource_location + resource_group_name = var.resource_group_name + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# start inbound rules +resource "azurerm_network_security_rule" "app_service_in_deny_all" { + count = (var.is_vnet_isolated && var.existing_app_service_subnet_id == "" ? 1 : 0) + name = "app_service_in_deny_all" + priority = 110 + direction = "Inbound" + access = "Deny" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_range = "*" + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.app_service_nsg[0].name + + depends_on = [ + azurerm_network_security_group.app_service_nsg[0], + ] +} +# end Inbound rules + +# start outbound rules + +# association +resource "azurerm_subnet_network_security_group_association" "app_service_nsg" { + count = (var.is_vnet_isolated && var.existing_app_service_subnet_id == "" ? 
1 : 0) + subnet_id = local.app_service_subnet_id + network_security_group_id = azurerm_network_security_group.app_service_nsg[0].id + timeouts {} + # The subnet will refuse to accept the NSG if it's not this exact + # list so we need to ensure the rules are deployed before the association + depends_on = [ + azurerm_network_security_rule.app_service_in_deny_all[0], + azurerm_subnet.app_service_subnet[0], + ] +} diff --git a/solution/DeploymentV2/terraform_layer2/nsg_bastion.tf b/solution/DeploymentV2/terraform_layer2/nsg_bastion.tf new file mode 100644 index 00000000..55a2f5be --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/nsg_bastion.tf @@ -0,0 +1,207 @@ +resource "azurerm_network_security_group" "bastion_nsg" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = local.bastion_nsg_name + location = var.resource_location + resource_group_name = var.resource_group_name + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# Inbound Rules +resource "azurerm_network_security_rule" "bastion_inbound_internet" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "inbound_internet_allow" + priority = 100 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "Internet" + + destination_port_range = "443" + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_inbound_control_plane" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 
1 : 0) + name = "inbound_control_plane_allow" + priority = 110 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "GatewayManager" + + destination_port_range = "443" + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_inbound_data_plane" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "inbound_data_plane_allow" + priority = 120 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "VirtualNetwork" + + destination_port_ranges = ["8080", "5701"] + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_inbound_load_balancer" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "inbound_load_balancer_allow" + priority = 130 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "AzureLoadBalancer" + + destination_port_range = "443" + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +#-------------------------------------------------------------------------------------------------------- +# Outbound Rules +resource "azurerm_network_security_rule" "bastion_outbound_bastion_vms" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 
1 : 0) + name = "outbound_bastion_vnet_allow" + priority = 110 + direction = "Outbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_ranges = ["3389", "22"] + destination_address_prefix = "VirtualNetwork" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_outbound_dataplane" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "bastion_outbound_dataplane_allow" + priority = 120 + direction = "Outbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_ranges = ["8080", "5701"] + destination_address_prefix = "VirtualNetwork" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_outbound_azure" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "bastion_outbound_azure_allow" + priority = 130 + direction = "Outbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_range = "443" + destination_address_prefix = "AzureCloud" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_outbound_internet" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 
1 : 0) + name = "bastion_outbound_internet_allow" + priority = 140 + direction = "Outbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_range = "80" + destination_address_prefix = "Internet" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} + +# Associate NSG with subnet + +resource "azurerm_subnet_network_security_group_association" "bastion_nsg" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + subnet_id = local.bastion_subnet_id + network_security_group_id = azurerm_network_security_group.bastion_nsg[0].id + + # The subnet will refuse to accept the NSG if it's not this exact + # list so we need to ensure the rules are deployed before the association + depends_on = [ + azurerm_network_security_rule.bastion_inbound_internet[0], + azurerm_network_security_rule.bastion_inbound_control_plane[0], + azurerm_network_security_rule.bastion_inbound_data_plane[0], + azurerm_network_security_rule.bastion_inbound_load_balancer[0], + azurerm_network_security_rule.bastion_outbound_bastion_vms[0], + azurerm_network_security_rule.bastion_outbound_dataplane[0], + azurerm_network_security_rule.bastion_outbound_azure[0], + azurerm_network_security_rule.bastion_outbound_internet[0], + azurerm_subnet.bastion_subnet[0], + ] + timeouts {} +} diff --git a/solution/DeploymentV2/terraform_layer2/nsg_plink.tf b/solution/DeploymentV2/terraform_layer2/nsg_plink.tf new file mode 100644 index 00000000..f50b6b4e --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/nsg_plink.tf @@ -0,0 +1,53 @@ +resource "azurerm_network_security_group" "plink_nsg" { + count = (var.is_vnet_isolated && var.existing_plink_subnet_id == "" ? 
1 : 0) + name = local.plink_nsg_name + location = var.resource_location + resource_group_name = var.resource_group_name + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# Inbound Rules + +# Outbound Rules +resource "azurerm_network_security_rule" "plink_out_deny_all" { + count = (var.is_vnet_isolated && var.existing_plink_subnet_id == "" ? 1 : 0) + name = "plink_out_deny_all" + priority = 110 + direction = "Outbound" + access = "Deny" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_range = "*" + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.plink_nsg[0].name + + depends_on = [ + azurerm_network_security_group.plink_nsg[0], + ] +} + +# Associate NSG with subnet + +resource "azurerm_subnet_network_security_group_association" "plink_nsg" { + count = (var.is_vnet_isolated && var.existing_plink_subnet_id == "" ? 1 : 0) + subnet_id = local.plink_subnet_id + network_security_group_id = azurerm_network_security_group.plink_nsg[0].id + + # The subnet will refuse to accept the NSG if it's not this exact + # list so we need to ensure the rules are deployed before the association + depends_on = [ + azurerm_network_security_rule.plink_out_deny_all[0], + azurerm_subnet.plink_subnet[0], + ] + timeouts {} +} diff --git a/solution/DeploymentV2/terraform_layer2/nsg_vms.tf b/solution/DeploymentV2/terraform_layer2/nsg_vms.tf new file mode 100644 index 00000000..76ec79ec --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/nsg_vms.tf @@ -0,0 +1,51 @@ +resource "azurerm_network_security_group" "vm_nsg" { + count = (var.is_vnet_isolated && var.existing_vm_subnet_id == "" ? 
1 : 0) + name = local.vm_nsg_name + location = var.resource_location + resource_group_name = var.resource_group_name + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# Inbound Rules +resource "azurerm_network_security_rule" "vm_inbound_bastion" { + count = (var.is_vnet_isolated && var.existing_vm_subnet_id == "" ? 1 : 0) + name = "inbound_bastion_allow" + priority = 110 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = var.bastion_subnet_cidr + + destination_port_ranges = ["22", "3389"] + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.vm_nsg[0].name + + depends_on = [ + azurerm_network_security_group.vm_nsg[0], + ] +} +# Outbound Rules + +# Associate NSG with subnet + +resource "azurerm_subnet_network_security_group_association" "vm_nsg" { + count = (var.is_vnet_isolated && var.existing_vm_subnet_id == "" ? 
1 : 0) + subnet_id = local.vm_subnet_id + network_security_group_id = azurerm_network_security_group.vm_nsg[0].id + + # The subnet will refuse to accept the NSG if it's not this exact + # list so we need to ensure the rules are deployed before the association + depends_on = [ + azurerm_subnet.vm_subnet[0], + ] + timeouts {} +} diff --git a/solution/DeploymentV2/terraform_layer2/outputs.tf b/solution/DeploymentV2/terraform_layer2/outputs.tf index 21e32368..1b140392 100644 --- a/solution/DeploymentV2/terraform_layer2/outputs.tf +++ b/solution/DeploymentV2/terraform_layer2/outputs.tf @@ -6,6 +6,266 @@ output "resource_group_name" { value = var.resource_group_name } +output "webapp_name" { + value = local.webapp_name +} + +output "functionapp_name" { + value = local.functionapp_name +} + +output "sqlserver_name" { + value = local.sql_server_name +} + +output "blobstorage_name" { + value = local.blob_storage_account_name +} + +output "adlsstorage_name" { + value = local.adls_storage_account_name +} + +output "datafactory_name" { + value = local.data_factory_name +} + +output "datafactory_principal_id" { + value = var.deploy_data_factory ? azurerm_data_factory.data_factory[0].identity[0].principal_id : "" +} + +output "function_app_principal_id" { + value = var.deploy_function_app ? 
azurerm_function_app.function_app[0].identity[0].principal_id : "" +} + +output "azurerm_key_vault_app_vault_id" { + value = azurerm_key_vault.app_vault.id +} + +output "keyvault_name" { + value = local.key_vault_name +} + +output "stagingdb_name" { + value = local.staging_database_name +} + +output "sampledb_name" { + value = local.sample_database_name +} + +output "metadatadb_name" { + value = local.metadata_database_name +} +output "loganalyticsworkspace_id" { + value = local.log_analytics_workspace_id +} +output "subscription_id" { + value = var.subscription_id +} +output "purview_name" { + value = local.purview_name +} +output "purview_sp_name" { + value = local.purview_ir_app_reg_name +} +output "is_vnet_isolated" { + value = var.is_vnet_isolated +} +output "aad_webreg_id" { + value = data.terraform_remote_state.layer1.outputs.aad_webreg_id +} output "aad_funcreg_id" { - value = data.terraform_remote_state.layer1.outputs.aad_funcreg_id + value = data.terraform_remote_state.layer1.outputs.aad_funcreg_id +} +output "purview_sp_id" { + value = var.deploy_purview && var.is_vnet_isolated ? 
azuread_application.purview_ir[0].application_id : "0" +} +output "integration_runtimes" { + value = local.integration_runtimes +} +output "is_onprem_datafactory_ir_registered" { + value = var.is_onprem_datafactory_ir_registered +} + +output "jumphost_vm_name" { + value = local.jumphost_vm_name +} + +output "deploy_web_app" { + value = var.deploy_web_app +} +output "deploy_function_app" { + value = var.deploy_function_app +} + +output "publish_web_app" { + value = var.publish_web_app +} +output "publish_function_app" { + value = var.publish_function_app +} +output "deploy_custom_terraform" { + value = var.deploy_custom_terraform +} +output "publish_sample_files" { + value = var.publish_sample_files +} +output "publish_metadata_database" { + value = var.publish_metadata_database +} +output "publish_sql_logins" { + value = var.publish_sql_logins +} +output "publish_functional_tests" { + value = var.publish_functional_tests +} +output "publish_purview_configuration" { + value = var.publish_purview_configuration +} +output "deploy_sql_server" { + value = var.deploy_sql_server +} +output "deploy_synapse" { + value = var.deploy_synapse +} +output "deploy_metadata_database" { + value = var.deploy_metadata_database +} +output "configure_networking" { + value = var.configure_networking +} +output "publish_datafactory_pipelines" { + value = var.publish_datafactory_pipelines +} +output "publish_web_app_addcurrentuserasadmin" { + value = var.publish_web_app_addcurrentuserasadmin +} +output "synapse_workspace_name" { + value = var.deploy_synapse ? azurerm_synapse_workspace.synapse[0].name : "" +} +output "synapse_sql_pool_name" { + value = var.deploy_synapse && var.deploy_synapse_sqlpool ? azurerm_synapse_sql_pool.synapse_sql_pool[0].name : "" +} +output "synapse_spark_pool_name" { + value = var.deploy_synapse && var.deploy_synapse_sparkpool ? 
azurerm_synapse_spark_pool.synapse_spark_pool[0].name : "" +} +output "selfhostedsqlvm_name" { + value = local.selfhostedsqlvm_name +} + +output "synapse_git_toggle_integration" { + value = var.synapse_git_toggle_integration +} +output "synapse_git_integration_type" { + value = var.synapse_git_toggle_integration ? var.synapse_git_integration_type : "" +} +output "synapse_git_repository_root_folder" { + value = var.synapse_git_toggle_integration ? var.synapse_git_repository_root_folder : "" +} +output "synapse_git_repository_owner" { + value = var.synapse_git_toggle_integration ? var.synapse_git_repository_owner : "" +} +output "synapse_git_repository_name" { + value = var.synapse_git_toggle_integration ? var.synapse_git_repository_name : "" +} +output "synapse_git_repository_branch_name" { + value = var.synapse_git_toggle_integration ? var.synapse_git_repository_branch_name : "" +} +output "functionapp_url" { + value = var.synapse_git_toggle_integration ? local.functionapp_url : "" +} +output "keyvault_url" { + value = var.synapse_git_toggle_integration ? azurerm_key_vault.app_vault.vault_uri : "" +} +output "synapse_git_devops_project_name" { + value = var.synapse_git_toggle_integration ? var.synapse_git_devops_project_name : "" +} +/* NOT CURRENTLY USED +output "synapse_git_repository_base_url" { + value = var.synapse_git_toggle_integration ? var.synapse_git_repository_base_url : "" +} */ +output "synapse_git_use_pat" { + value = var.synapse_git_toggle_integration ? var.synapse_git_use_pat : false +} +output "synapse_git_pat" { + value = var.synapse_git_use_pat ? var.synapse_git_pat : "" +} +output "synapse_git_user_name" { + value = var.synapse_git_user_name +} +output "synapse_git_email_address" { + value = var.synapse_git_email_address +} +output "synapse_git_github_host_url" { + value = var.synapse_git_toggle_integration ? var.synapse_git_github_host_url : "" +} +output "synapse_git_devops_tenant_id" { + value = var.synapse_git_devops_tenant_id != "" ? 
var.synapse_git_devops_tenant_id : var.tenant_id +} +output "adf_git_toggle_integration" { + value = var.adf_git_toggle_integration +} +output "adf_git_repository_root_folder" { + value = var.adf_git_toggle_integration ? var.adf_git_repository_root_folder : "" +} +output "adf_git_repository_owner" { + value = var.adf_git_toggle_integration ? var.adf_git_repository_owner : "" +} +output "adf_git_repository_name" { + value = var.adf_git_toggle_integration ? var.adf_git_repository_name : "" +} +output "adf_git_repository_branch_name" { + value = var.adf_git_toggle_integration ? var.adf_git_repository_branch_name : "" +} +output "adf_git_use_pat" { + value = var.adf_git_toggle_integration ? var.adf_git_use_pat : false +} +output "adf_git_pat" { + value = var.adf_git_use_pat ? var.adf_git_pat : "" +} +output "adf_git_user_name" { + value = var.adf_git_user_name +} +output "adf_git_email_address" { + value = var.adf_git_email_address +} +output "adf_git_host_url" { + value = var.adf_git_toggle_integration ? var.adf_git_host_url : "" +} +output "synapse_lakedatabase_container_name" { + value = var.deploy_synapse ? azurerm_storage_data_lake_gen2_filesystem.dlfs[0].name : "" +} +output "publish_sif_database" { + value = var.publish_sif_database +} +output "sif_database_name" { + value = var.sif_database_name +} + +output "azurerm_function_app_identity_principal_id" { + value = var.deploy_function_app ? 
azurerm_function_app.function_app[0].identity[0].principal_id : "" +} + + +/*Variables from Previous Layer*/ +output "random_uuid_function_app_reg_role_id" { + value = data.terraform_remote_state.layer1.outputs.random_uuid_function_app_reg_role_id +} + +output "azuread_service_principal_function_app_object_id" { + value = data.terraform_remote_state.layer1.outputs.azuread_service_principal_function_app_object_id +} + +output "azuread_application_function_app_reg_object_id" { + value = data.terraform_remote_state.layer1.outputs.azuread_application_function_app_reg_object_id +} + +/*Variables for Naming Module*/ +output "naming_unique_seed" { + value = data.terraform_remote_state.layer1.outputs.naming_unique_seed +} + +output "naming_unique_suffix" { + value = data.terraform_remote_state.layer1.outputs.naming_unique_suffix } \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/private_dns.tf b/solution/DeploymentV2/terraform_layer2/private_dns.tf new file mode 100644 index 00000000..0ac8e32c --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/private_dns.tf @@ -0,0 +1,176 @@ +resource "azurerm_private_dns_zone" "private_dns_zone_db" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_db_id == "" ? 1 : 0) + name = "privatelink.database.windows.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "database" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_db_id == "" ? 1 : 0) + name = "${local.vnet_name}-database" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_db[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +locals { + private_dns_zone_db_id = (var.is_vnet_isolated && var.existing_private_dns_zone_db_id == "" ? 
azurerm_private_dns_zone.private_dns_zone_db[0].id : var.existing_private_dns_zone_db_id) + private_dns_zone_kv_id = (var.is_vnet_isolated && var.existing_private_dns_zone_kv_id == "" ? azurerm_private_dns_zone.private_dns_zone_kv[0].id : var.existing_private_dns_zone_kv_id) + private_dns_zone_blob_id = (var.is_vnet_isolated && var.existing_private_dns_zone_blob_id == "" ? azurerm_private_dns_zone.private_dns_zone_blob[0].id : var.existing_private_dns_zone_blob_id) + private_dns_zone_queue_id = (var.is_vnet_isolated && var.existing_private_dns_zone_queue_id == "" ? azurerm_private_dns_zone.private_dns_zone_queue[0].id : var.existing_private_dns_zone_queue_id) + private_dns_zone_dfs_id = (var.is_vnet_isolated && var.existing_private_dns_zone_dfs_id == "" ? azurerm_private_dns_zone.private_dns_zone_dfs[0].id : var.existing_private_dns_zone_dfs_id) + private_dns_zone_purview_id = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_id == "" ? azurerm_private_dns_zone.private_dns_zone_purview[0].id : var.existing_private_dns_zone_purview_id) + private_dns_zone_purview_studio_id = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_studio_id == "" ? azurerm_private_dns_zone.private_dns_zone_purview_studio[0].id : var.existing_private_dns_zone_purview_studio_id) + private_dns_zone_servicebus_id = (var.is_vnet_isolated && var.existing_private_dns_zone_servicebus_id == "" ? azurerm_private_dns_zone.private_dns_zone_servicebus[0].id : var.existing_private_dns_zone_servicebus_id) + private_dns_zone_synapse_gateway_id = (var.is_vnet_isolated && var.existing_private_dns_zone_synapse_gateway_id == "" ? azurerm_private_dns_zone.synapse_gateway[0].id : var.existing_private_dns_zone_synapse_gateway_id) + private_dns_zone_synapse_studio_id = (var.is_vnet_isolated && var.existing_private_dns_zone_synapse_studio_id == "" ? 
azurerm_private_dns_zone.synapse_studio[0].id : var.existing_private_dns_zone_synapse_studio_id) + private_dns_zone_synapse_sql_id = (var.is_vnet_isolated && var.existing_private_dns_zone_synapse_sql_id == "" ? azurerm_private_dns_zone.synapse_sql[0].id : var.existing_private_dns_zone_synapse_sql_id) + +} + + +resource "azurerm_private_dns_zone" "private_dns_zone_kv" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_kv_id == "" ? 1 : 0) + name = "privatelink.vaultcore.azure.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "vaultcore" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_kv_id == "" ? 1 : 0) + name = "${local.vnet_name}-vaultcore" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_kv[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_private_dns_zone" "private_dns_zone_blob" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_blob_id == "" ? 1 : 0) + name = "privatelink.blob.core.windows.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "blob" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_blob_id == "" ? 1 : 0) + name = "${local.vnet_name}-blob" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_blob[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "private_dns_zone_queue" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_queue_id == "" ? 
1 : 0) + name = "privatelink.queue.core.windows.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "queue" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_queue_id == "" ? 1 : 0) + name = "${local.vnet_name}-queue" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_queue[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "private_dns_zone_dfs" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_dfs_id == "" ? 1 : 0) + name = "privatelink.dfs.core.windows.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "dfs" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_dfs_id == "" ? 1 : 0) + name = "${local.vnet_name}-dfs" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_dfs[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "private_dns_zone_purview" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_id == "" ? 1 : 0) + name = "privatelink.purview.azure.com" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "purview" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_id == "" ? 1 : 0) + name = "${local.vnet_name}-purview" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_purview[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "private_dns_zone_purview_studio" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_studio_id == "" ? 
1 : 0) + name = "privatelink.purviewstudio.azure.com" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "purview_studio" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_studio_id == "" ? 1 : 0) + name = "${local.vnet_name}-purviewstudio" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_purview_studio[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "private_dns_zone_servicebus" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_servicebus_id == "" ? 1 : 0) + name = "privatelink.servicebus.windows.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "servicebus" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_servicebus_id == "" ? 1 : 0) + name = "${local.vnet_name}-servicebus" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_servicebus[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +# Synapse Private DNS Zones +resource "azurerm_private_dns_zone" "synapse_gateway" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_gateway_id == "" ? 1 : 0) + name = "privatelink.azuresynapse.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "synapse_gateway" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_gateway_id == "" ? 
1 : 0) + name = "${local.vnet_name}-synapsegateway" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.synapse_gateway[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "synapse_sql" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_sql_id == "" ? 1 : 0) + name = "privatelink.sql.azuresynapse.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "synapse_sql" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_sql_id == "" ? 1 : 0) + name = "${local.vnet_name}-synapsesql" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.synapse_sql[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "synapse_studio" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_studio_id == "" ? 1 : 0) + name = "privatelink.dev.azuresynapse.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "synapse_studio" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_studio_id == "" ? 1 : 0) + name = "${local.vnet_name}-synapsestudio" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.synapse_studio[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} diff --git a/solution/DeploymentV2/terraform_layer2/purview.tf b/solution/DeploymentV2/terraform_layer2/purview.tf new file mode 100644 index 00000000..017c00e3 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/purview.tf @@ -0,0 +1,120 @@ +resource "azurerm_purview_account" "purview" { + count = var.deploy_purview ? 
1 : 0 + name = local.purview_name + resource_group_name = var.resource_group_name + location = var.purview_resource_location == "" ? var.resource_location : var.purview_resource_location + managed_resource_group_name = local.purview_resource_group_name + public_network_enabled = var.is_vnet_isolated == false || var.delay_private_access + tags = local.tags + + identity { + type = "SystemAssigned" + } + + lifecycle { + ignore_changes = [ + tags + ] + } +} + + +resource "azurerm_private_endpoint" "purview_account_private_endpoint_with_dns" { + count = var.is_vnet_isolated && var.deploy_purview ? 1 : 0 + name = local.purview_account_plink + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.purview_account_plink}-conn" + private_connection_resource_id = azurerm_purview_account.purview[0].id + is_manual_connection = false + subresource_names = ["account"] + } + + private_dns_zone_group { + name = "privatednszonegroup" + private_dns_zone_ids = [local.private_dns_zone_purview_id] + } + + depends_on = [ + azurerm_purview_account.purview[0] + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_private_endpoint" "purview_portal_private_endpoint_with_dns" { + count = var.is_vnet_isolated && var.deploy_purview ? 
1 : 0 + name = local.purview_portal_plink + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.purview_portal_plink}-conn" + private_connection_resource_id = azurerm_purview_account.purview[0].id + is_manual_connection = false + subresource_names = ["portal"] + } + + private_dns_zone_group { + name = "privatednszonegroup" + private_dns_zone_ids = [local.private_dns_zone_purview_studio_id] + } + + depends_on = [ + azurerm_purview_account.purview[0] + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# Azure private endpoints +module "purview_ingestion_private_endpoints" { + source = "./modules/purview_ingestion_private_endpoints" + count = var.is_vnet_isolated && var.deploy_purview ? 1 : 0 + resource_group_name = var.resource_group_name + purview_account_name = azurerm_purview_account.purview[0].name + resource_location = var.resource_location + queue_privatelink_name = "${local.purview_name}-queue-plink" + storage_privatelink_name = "${local.purview_name}-storage-plink" + eventhub_privatelink_name = "${local.purview_name}-event-plink" + blob_private_dns_id = local.private_dns_zone_blob_id + queue_private_dns_id = local.private_dns_zone_queue_id + servicebus_private_dns_id = local.private_dns_zone_servicebus_id + subnet_id = local.plink_subnet_id + managed_resource_group_name = local.purview_resource_group_name + name_suffix = random_id.rg_deployment_unique.id + subscription_id = var.subscription_id +} + +// Create an IR service principal (private linked resources can't use the azure hosted IRs) +resource "azuread_application" "purview_ir" { + count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 + display_name = local.purview_ir_app_reg_name + owners = [data.azurerm_client_config.current.object_id] +} + +resource "azuread_service_principal" "purview_ir" { + count = var.deploy_purview && var.is_vnet_isolated ? 
1 : 0 + application_id = azuread_application.purview_ir[0].application_id + owners = [data.azurerm_client_config.current.object_id] +} + + +resource "azuread_application_password" "purview_ir" { + count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 + application_object_id = azuread_application.purview_ir[0].object_id +} diff --git a/solution/DeploymentV2/terraform_layer2/readme.md b/solution/DeploymentV2/terraform_layer2/readme.md new file mode 100644 index 00000000..0f05e929 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/readme.md @@ -0,0 +1,101 @@ +# Setting up a new environment +This section describes how to set up a new environment. It is suggested that for development purposes, developers use their own unique development environment, i.e. a separate resource group, storage account and Terraform state file. + + +# Setting up Infrastructure from Local Machine +This describes how to run the Terraform configuration from your local machine. We will be running with local state and not persisting to a remote state store. + +## Run Terraform/Terragrunt locally +Log into environment as yourself and change to infra directory. This is a quick way to get going but doesn't test the permissions of the account which will be actually deploying. + +``` PowerShell +az login +az account set --subscription +``` + +``` PowerShell +terragrunt init --terragrunt-config vars/local/terragrunt.hcl -reconfigure +terragrunt plan --terragrunt-config vars/local/terragrunt.hcl +terragrunt apply --terragrunt-config vars/local/terragrunt.hcl +``` + +``` PowerShell +terragrunt init --terragrunt-config vars/staging/terragrunt.hcl -reconfigure +terragrunt plan --terragrunt-config vars/staging/terragrunt.hcl +terragrunt apply --terragrunt-config vars/staging/terragrunt.hcl +``` + +# Setting up Infrastructure for CI/CD managed environment +This describes how to run the Terraform configuration to create the state store for an environment. 
This should be run once per environment and provides the location for terraform state to be stored. + +## Set up Terraform state +This will set up a resource group, storage account and container to be used for Terraform state. The same resource group will be used for deployed artefacts. + +Run PowerShell + +Log into environment. + +``` PowerShell +az login +az account set --subscription +``` + +Edit *infrastructure\state\create-state-store.ps1* so that *$RESOURCE_GROUP_NAME* and *$RESOURCE_GROUP_NAME* reflect the environment. + +Run the script to create the resources. + +## Set up Terragrunt config file +Set up the config file in location *infrastructure\vars\\terragrunt.hcl* + +Set *remote_state.config.resource_group_name* and *remote_state.config.storage_account_name* as appropriate for the environment, and point to the resource group and storage created above. + +Set up the *inputs* section to reflect the environment being deployed to. + + +## Init the state for the environment +Run PowerShell + +Log into environment and change to infra directory. + +``` PowerShell +az login +az account set --subscription +cd infrastructure +``` + +Initialise state + +``` PowerShell +cd infrastructure +terragrunt init --terragrunt-config vars/development/terragrunt.hcl +``` + +## If you need to import existing resources + + +1. Grant you service principal rights to the resources. +eg. $assignment = az role assignment create --role "Owner" --assignee 4c732d19-4076-4a76-87f3-6fbfd77f007d --resource-group "gft2" + +az ad app owner add --id db2c4f38-1566-41af-a1d4-495cd59097cc --owner-object-id 4c732d19-4076-4a76-87f3-6fbfd77f007d +az ad app owner add --id d2e89752-2e75-48ba-a5a7-cb4bbc7bcfc8 --owner-object-id 4c732d19-4076-4a76-87f3-6fbfd77f007d + + + +2. 
Then import resources into state + +terraform import azuread_application.web_reg[0] 497fb46f-3d88-4445-b9e8-7065970e3b40 +terraform import azuread_application.function_app_reg[0] db2c4f38-1566-41af-a1d4-495cd59097cc + + +# Required Azure resource providers +Microsoft.Storage +Microsoft.Network +Microsoft.Web +microsoft.insights +Microsoft.ManagedIdentity +Microsoft.KeyVault +Microsoft.OperationalInsights +Microsoft.Purview +Microsoft.EventHub +Microsoft.Compute + diff --git a/solution/DeploymentV2/terraform_layer2/security.tf b/solution/DeploymentV2/terraform_layer2/security.tf new file mode 100644 index 00000000..b33e34fe --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/security.tf @@ -0,0 +1,112 @@ + +resource "azurerm_log_analytics_workspace" "log_analytics_workspace" { + count = (var.existing_log_analytics_workspace_id == "" ? 1 : 0) + name = local.log_analytics_workspace_name + location = var.resource_location + resource_group_name = var.resource_group_name + sku = "PerGB2018" + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +locals { + log_analytics_resource_id = (var.existing_log_analytics_resource_id == "" ? azurerm_log_analytics_workspace.log_analytics_workspace[0].id : var.existing_log_analytics_resource_id) + log_analytics_workspace_id = (var.existing_log_analytics_workspace_id == "" ? azurerm_log_analytics_workspace.log_analytics_workspace[0].workspace_id : var.existing_log_analytics_workspace_id) + +} + +resource "azurerm_log_analytics_solution" "sentinel" { + count = var.deploy_sentinel ? 1 : 0 + solution_name = "SecurityInsights" + location = var.resource_location + resource_group_name = var.resource_group_name + workspace_resource_id = local.log_analytics_resource_id + workspace_name = local.log_analytics_workspace_name + plan { + publisher = "Microsoft" + product = "OMSGallery/SecurityInsights" + } +} + +resource "azurerm_role_assignment" "loganalytics_function_app" { + count = var.deploy_function_app ? 
1 : 0 + scope = local.log_analytics_resource_id + role_definition_name = "Contributor" + principal_id = azurerm_function_app.function_app[0].identity[0].principal_id +} + +resource "azurerm_role_assignment" "loganalytics_web_app" { + count = var.deploy_web_app ? 1 : 0 + scope = local.log_analytics_resource_id + role_definition_name = "Contributor" + principal_id = azurerm_app_service.web[0].identity[0].principal_id +} + +resource "azurerm_storage_account" "storage_acccount_security_logs" { + name = local.logs_storage_account_name + resource_group_name = var.resource_group_name + location = var.resource_location + account_tier = "Standard" + account_replication_type = "LRS" + account_kind = "StorageV2" + is_hns_enabled = "true" + min_tls_version = "TLS1_2" + #allow_blob_public_access = "false" + + identity { + type = "SystemAssigned" + } + network_rules { + default_action = var.is_vnet_isolated ? "Deny" : "Allow" + bypass = ["Metrics", "AzureServices"] + } + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_role_assignment" "blob_function_app_sec" { + count = var.deploy_function_app ? 1 : 0 + scope = azurerm_storage_account.storage_acccount_security_logs.id + role_definition_name = "Storage Blob Data Contributor" + principal_id = azurerm_function_app.function_app[0].identity[0].principal_id +} + +resource "azurerm_private_endpoint" "storage_private_endpoint_with_dns" { + count = var.is_vnet_isolated ? 
1 : 0 + name = "${azurerm_storage_account.storage_acccount_security_logs.name}-plink" + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${azurerm_storage_account.storage_acccount_security_logs.name}-plink-conn" + private_connection_resource_id = azurerm_storage_account.storage_acccount_security_logs.id + is_manual_connection = false + subresource_names = ["blob"] + } + + private_dns_zone_group { + name = "privatednszonegroupstorage" + private_dns_zone_ids = [local.private_dns_zone_blob_id] + } + + depends_on = [ + azurerm_storage_account.storage_acccount_security_logs + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/storage_adls.tf b/solution/DeploymentV2/terraform_layer2/storage_adls.tf new file mode 100644 index 00000000..d533516d --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/storage_adls.tf @@ -0,0 +1,168 @@ +resource "azurerm_storage_account" "adls" { + count = var.deploy_adls ? 1 : 0 + name = local.adls_storage_account_name + resource_group_name = var.resource_group_name + location = var.resource_location + account_tier = "Standard" + account_replication_type = "GRS" + account_kind = "StorageV2" + is_hns_enabled = "true" + min_tls_version = "TLS1_2" + #allow_blob_public_access = "false" + network_rules { + default_action = var.is_vnet_isolated ? "Deny" : "Allow" + bypass = ["Metrics", "AzureServices"] + ip_rules = var.is_vnet_isolated ? [var.ip_address] : [] // This is required to allow us to create the initial Synapse Managed Private endpoint + } + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_role_assignment" "adls_function_app" { + count = var.deploy_adls && var.deploy_function_app ? 
1 : 0 + scope = azurerm_storage_account.adls[0].id + role_definition_name = "Storage Blob Data Contributor" + principal_id = azurerm_function_app.function_app[0].identity[0].principal_id +} + +resource "azurerm_role_assignment" "adls_data_factory" { + count = var.deploy_adls && var.deploy_data_factory ? 1 : 0 + scope = azurerm_storage_account.adls[0].id + role_definition_name = "Storage Blob Data Contributor" + principal_id = azurerm_data_factory.data_factory[0].identity[0].principal_id +} + +resource "azurerm_role_assignment" "synapse" { + count = var.deploy_adls && var.deploy_synapse ? 1 : 0 + scope = azurerm_storage_account.adls[0].id + role_definition_name = "Storage Blob Data Contributor" + principal_id = azurerm_synapse_workspace.synapse[0].identity[0].principal_id +} + +resource "azurerm_role_assignment" "adls_purview_sp" { + count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 + scope = azurerm_storage_account.adls[0].id + role_definition_name = "Storage Blob Data Contributor" + principal_id = azuread_service_principal.purview_ir[0].object_id +} + + +resource "azurerm_private_endpoint" "adls_storage_private_endpoint_with_dns" { + count = var.deploy_adls && var.is_vnet_isolated ? 
1 : 0 + name = "${local.adls_storage_account_name}-blob-plink" + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.adls_storage_account_name}-blob-plink-conn" + private_connection_resource_id = azurerm_storage_account.adls[0].id + is_manual_connection = false + subresource_names = ["blob"] + } + + private_dns_zone_group { + name = "privatednszonegroupstorageblob" + private_dns_zone_ids = [local.private_dns_zone_blob_id] + } + + depends_on = [ + azurerm_storage_account.adls + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_private_endpoint" "adls_dfs_storage_private_endpoint_with_dns" { + count = var.deploy_adls && var.is_vnet_isolated ? 1 : 0 + name = "${local.adls_storage_account_name}-dfs-plink" + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.adls_storage_account_name}-dfs-plink-conn" + private_connection_resource_id = azurerm_storage_account.adls[0].id + is_manual_connection = false + subresource_names = ["dfs"] + } + + private_dns_zone_group { + name = "privatednszonegroupstoragedfs" + private_dns_zone_ids = [local.private_dns_zone_dfs_id] + } + + depends_on = [ + azurerm_storage_account.adls + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# // Diagnostic logs-------------------------------------------------------------------------- +resource "azurerm_monitor_diagnostic_setting" "adls_storage_diagnostic_logs" { + count = var.deploy_adls ? 
1 : 0 + name = "diagnosticlogs" + target_resource_id = "${azurerm_storage_account.adls[0].id}/blobServices/default/" + log_analytics_workspace_id = local.log_analytics_resource_id + # ignore_changes is here given the bug https://github.com/terraform-providers/terraform-provider-azurerm/issues/10388 + lifecycle { + ignore_changes = [log, metric] + } + log { + category = "StorageRead" + enabled = true + retention_policy { + days = 0 + enabled = true + } + } + log { + category = "StorageWrite" + enabled = true + retention_policy { + days = 0 + enabled = true + } + } + log { + category = "StorageDelete" + enabled = true + retention_policy { + days = 0 + enabled = true + } + } + + metric { + category = "Transaction" + enabled = false + retention_policy { + days = 0 + enabled = false + } + } + metric { + category = "Capacity" + enabled = false + retention_policy { + days = 0 + enabled = false + } + } +} diff --git a/solution/DeploymentV2/terraform_layer2/storage_blob.tf b/solution/DeploymentV2/terraform_layer2/storage_blob.tf new file mode 100644 index 00000000..18b444e5 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/storage_blob.tf @@ -0,0 +1,133 @@ + + +resource "azurerm_storage_account" "blob" { + name = local.blob_storage_account_name + count = var.deploy_storage_account ? 1 : 0 + resource_group_name = var.resource_group_name + location = var.resource_location + account_tier = "Standard" + account_replication_type = "GRS" + account_kind = "StorageV2" + is_hns_enabled = "false" + min_tls_version = "TLS1_2" + #allow_blob_public_access = "false" + network_rules { + default_action = var.is_vnet_isolated ? "Deny" : "Allow" + bypass = ["Metrics", "AzureServices"] + ip_rules = var.is_vnet_isolated ? 
[var.ip_address] : [] // This is required to allow us to create the initial Synapse Managed Private endpoint + } + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_role_assignment" "blob_function_app" { + count = var.deploy_storage_account && var.deploy_function_app ? 1 : 0 + scope = azurerm_storage_account.blob[0].id + role_definition_name = "Storage Blob Data Contributor" + principal_id = azurerm_function_app.function_app[0].identity[0].principal_id +} + +resource "azurerm_role_assignment" "blob_data_factory" { + count = var.deploy_storage_account && var.deploy_data_factory ? 1 : 0 + scope = azurerm_storage_account.blob[0].id + role_definition_name = "Storage Blob Data Contributor" + principal_id = azurerm_data_factory.data_factory[0].identity[0].principal_id +} + +resource "azurerm_role_assignment" "blob_purview_sp" { + count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 + scope = azurerm_storage_account.blob[0].id + role_definition_name = "Storage Blob Data Contributor" + principal_id = azuread_service_principal.purview_ir[0].object_id +} + + +resource "azurerm_private_endpoint" "blob_storage_private_endpoint_with_dns" { + count = var.deploy_storage_account && var.is_vnet_isolated ? 
1 : 0 + name = "${local.blob_storage_account_name}-plink" + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.blob_storage_account_name}-plink-conn" + private_connection_resource_id = azurerm_storage_account.blob[0].id + is_manual_connection = false + subresource_names = ["blob"] + } + + private_dns_zone_group { + name = "privatednszonegroupstorage" + private_dns_zone_ids = [local.private_dns_zone_blob_id] + } + + depends_on = [ + azurerm_storage_account.adls + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + + +# // Diagnostic logs-------------------------------------------------------------------------- +resource "azurerm_monitor_diagnostic_setting" "blob_storage_diagnostic_logs" { + name = "diagnosticlogs" + count = var.deploy_storage_account ? 1 : 0 + target_resource_id = "${azurerm_storage_account.blob[0].id}/blobServices/default/" + log_analytics_workspace_id = local.log_analytics_resource_id + # ignore_changes is here given the bug https://github.com/terraform-providers/terraform-provider-azurerm/issues/10388 + lifecycle { + ignore_changes = [log, metric] + } + log { + category = "StorageRead" + enabled = true + retention_policy { + days = 0 + enabled = true + } + } + log { + category = "StorageWrite" + enabled = true + retention_policy { + days = 0 + enabled = true + } + } + log { + category = "StorageDelete" + enabled = true + retention_policy { + days = 0 + enabled = true + } + } + + metric { + category = "Transaction" + enabled = true + retention_policy { + days = 0 + enabled = false + } + } + metric { + category = "Capacity" + enabled = true + retention_policy { + days = 0 + enabled = false + } + } +} diff --git a/solution/DeploymentV2/terraform_layer2/subnet.tf b/solution/DeploymentV2/terraform_layer2/subnet.tf new file mode 100644 index 00000000..4b73c126 --- /dev/null +++ 
b/solution/DeploymentV2/terraform_layer2/subnet.tf @@ -0,0 +1,63 @@ +resource "azurerm_subnet" "plink_subnet" { + count = (var.is_vnet_isolated && var.existing_plink_subnet_id == "" ? 1 : 0) + name = local.plink_subnet_name + resource_group_name = var.resource_group_name + virtual_network_name = azurerm_virtual_network.vnet[0].name + address_prefixes = [var.plink_subnet_cidr] + enforce_private_link_endpoint_network_policies = true +} + +locals { + plink_subnet_id = (var.existing_plink_subnet_id == "" && (var.is_vnet_isolated) ? azurerm_subnet.plink_subnet[0].id : var.existing_plink_subnet_id) +} + +resource "azurerm_subnet" "bastion_subnet" { + count = (var.is_vnet_isolated && var.deploy_bastion && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "AzureBastionSubnet" + resource_group_name = var.resource_group_name + virtual_network_name = azurerm_virtual_network.vnet[0].name + address_prefixes = [var.bastion_subnet_cidr] + enforce_private_link_endpoint_network_policies = true +} + +locals { + bastion_subnet_id = (var.existing_bastion_subnet_id == "" && (var.is_vnet_isolated) && var.deploy_bastion ? azurerm_subnet.bastion_subnet[0].id : var.existing_bastion_subnet_id) +} + +resource "azurerm_subnet" "vm_subnet" { + count = (var.is_vnet_isolated || (var.deploy_selfhostedsql || var.deploy_h2o-ai) && var.existing_vm_subnet_id == "" ? 1 : 0) + name = local.vm_subnet_name + resource_group_name = var.resource_group_name + virtual_network_name = azurerm_virtual_network.vnet[0].name + address_prefixes = [var.vm_subnet_cidr] + enforce_private_link_endpoint_network_policies = true +} + +locals { + vm_subnet_id = (var.existing_vm_subnet_id == "" && ((var.is_vnet_isolated) || var.deploy_selfhostedsql || var.deploy_h2o-ai) ? azurerm_subnet.vm_subnet[0].id : var.existing_vm_subnet_id) +} + + +resource "azurerm_subnet" "app_service_subnet" { + count = (var.is_vnet_isolated && var.deploy_app_service_plan && var.existing_app_service_subnet_id == "" ? 
1 : 0) + name = local.app_service_subnet_name + resource_group_name = var.resource_group_name + virtual_network_name = azurerm_virtual_network.vnet[0].name + address_prefixes = [var.app_service_subnet_cidr] + enforce_private_link_endpoint_network_policies = false + + + # required for VNet integration with app services (functions) + # https://docs.microsoft.com/en-us/azure/app-service/web-sites-integrate-with-vnet#regional-vnet-integration + delegation { + name = "app-service-delegation" + + service_delegation { + name = "Microsoft.Web/serverFarms" + actions = ["Microsoft.Network/virtualNetworks/subnets/action"] + } + } +} +locals { + app_service_subnet_id = (var.existing_app_service_subnet_id == "" && (var.is_vnet_isolated) ? azurerm_subnet.app_service_subnet[0].id : var.existing_app_service_subnet_id) +} diff --git a/solution/DeploymentV2/terraform_layer2/synapse.tf b/solution/DeploymentV2/terraform_layer2/synapse.tf new file mode 100644 index 00000000..b1446472 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/synapse.tf @@ -0,0 +1,455 @@ +# -------------------------------------------------------------------------------------------------------------------- +# Workspace +# -------------------------------------------------------------------------------------------------------------------- + + + + +resource "azurerm_storage_data_lake_gen2_filesystem" "dlfs" { + count = var.deploy_adls && var.deploy_synapse ? 1 : 0 + name = local.synapse_data_lake_name + storage_account_id = azurerm_storage_account.adls[0].id +} + +resource "azurerm_synapse_workspace" "synapse" { + count = var.deploy_adls && var.deploy_synapse ? 
1 : 0 + name = local.synapse_workspace_name + resource_group_name = var.resource_group_name + location = var.resource_location + storage_data_lake_gen2_filesystem_id = azurerm_storage_data_lake_gen2_filesystem.dlfs[0].id + sql_administrator_login = var.synapse_sql_login + sql_administrator_login_password = local.synapse_sql_password + sql_identity_control_enabled = true + public_network_access_enabled = ((var.is_vnet_isolated == false) || (var.delay_private_access == true)) + managed_virtual_network_enabled = true + managed_resource_group_name = local.synapse_resource_group_name + purview_id = var.deploy_purview ? azurerm_purview_account.purview[0].id : null + + #github_repo { + # account_name = var.synapse_git_account_name + # branch_name = var.synapse_git_repository_branch_name + # repository_name = var.synapse_git_repository_name + # root_folder = var.synapse_git_repository_root_folder + # git_url = (Optional) Specifies the GitHub Enterprise host name. For example: https://github.mydomain.com. + + #} + + dynamic "github_repo" { + for_each = ((var.synapse_git_toggle_integration && var.synapse_git_integration_type == "github") ? [true] : []) + content { + account_name = var.synapse_git_repository_owner + branch_name = var.synapse_git_repository_branch_name + repository_name = var.synapse_git_repository_name + root_folder = var.synapse_git_repository_root_folder + git_url = var.synapse_git_github_host_url + } + } + + dynamic "azure_devops_repo" { + for_each = ((var.synapse_git_toggle_integration && var.synapse_git_integration_type == "devops") ? [true] : []) + content { + account_name = var.synapse_git_repository_owner + branch_name = var.synapse_git_repository_branch_name + repository_name = var.synapse_git_repository_name + root_folder = var.synapse_git_repository_root_folder + project_name = var.synapse_git_devops_project_name + #if a custom tenant id isnt assigned, will use the terraform tenant_id + tenant_id = var.synapse_git_devops_tenant_id != "" ? 
var.synapse_git_devops_tenant_id : var.tenant_id + } + } + + identity { + type = "SystemAssigned" + } + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + + + + +# -------------------------------------------------------------------------------------------------------------------- +# SQL Dedicated Pool +# -------------------------------------------------------------------------------------------------------------------- +resource "azurerm_synapse_sql_pool" "synapse_sql_pool" { + count = var.deploy_adls && var.deploy_synapse && var.deploy_synapse_sqlpool ? 1 : 0 + name = local.synapse_dwpool_name + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + sku_name = var.synapse_sku + create_mode = "Default" + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# -------------------------------------------------------------------------------------------------------------------- +# Spark Pool +# -------------------------------------------------------------------------------------------------------------------- +resource "azurerm_synapse_spark_pool" "synapse_spark_pool" { + count = var.deploy_adls && var.deploy_synapse && var.deploy_synapse_sparkpool ? 
1 : 0 + name = local.synapse_sppool_name + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + node_size_family = "MemoryOptimized" + node_size = "Small" + cache_size = 100 + + auto_scale { + max_node_count = var.synapse_spark_max_node_count + min_node_count = var.synapse_spark_min_node_count + } + + auto_pause { + delay_in_minutes = 15 + } + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# -------------------------------------------------------------------------------------------------------------------- +# Synapse Workspace Firewall Rules (Allow Public Access) +# -------------------------------------------------------------------------------------------------------------------- +resource "azurerm_synapse_firewall_rule" "cicd" { + count = var.deploy_adls && var.deploy_synapse ? 1 : 0 + name = "AllowGitHub" + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + start_ip_address = var.ip_address + end_ip_address = var.ip_address +} + +# -------------------------------------------------------------------------------------------------------------------- +# Synapse Workspace Firewall Rules (Allow Public Access) +# -------------------------------------------------------------------------------------------------------------------- +resource "azurerm_synapse_firewall_rule" "public_access" { + count = var.deploy_adls && var.deploy_synapse && var.allow_public_access_to_synapse_studio ? 
1 : 0 + name = "AllowAll" + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + start_ip_address = "0.0.0.0" + end_ip_address = "255.255.255.255" +} + +resource "time_sleep" "azurerm_synapse_firewall_rule_wait_30_seconds_cicd" { + depends_on = [azurerm_synapse_firewall_rule.cicd] + create_duration = "30s" +} + +# -------------------------------------------------------------------------------------------------------------------- +# Synapse Workspace Roles and Linked Services +# -------------------------------------------------------------------------------------------------------------------- +resource "azurerm_synapse_role_assignment" "synapse_function_app_assignment" { + count = var.deploy_synapse && var.deploy_function_app ? 1 : 0 + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + role_name = "Synapse Administrator" + principal_id = azurerm_function_app.function_app[0].identity[0].principal_id + depends_on = [ + azurerm_synapse_firewall_rule.public_access, + time_sleep.azurerm_synapse_firewall_rule_wait_30_seconds_cicd + ] + +} + +resource "azurerm_synapse_linked_service" "synapse_keyvault_linkedservice" { + count = var.deploy_synapse ? 
1 : 0 + name = "SLS_AzureKeyVault" + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + type = "AzureKeyVault" + depends_on = [ + azurerm_synapse_firewall_rule.public_access, + time_sleep.azurerm_synapse_firewall_rule_wait_30_seconds_cicd + ] + type_properties_json = < Date: Sun, 24 Jul 2022 12:05:03 +0800 Subject: [PATCH 041/151] Breaking up terraform into layers --- solution/Deployment/environments/test.ps1 | 25 +++++++ solution/DeploymentV2/Deploy.ps1 | 6 +- .../environments/vars/admz/common_vars.yaml | 10 +++ .../environments/vars/local/common_vars.yaml | 10 +++ .../vars/production/common_vars.yaml | 10 +++ .../vars/staging/common_vars.yaml | 10 +++ .../vars/local/terragrunt.hcl | 31 ++++---- .../vars/production/terragrunt.hcl | 30 ++++---- .../vars/staging/terragrunt.hcl | 27 ++++--- .../DeploymentV2/terraform_layer2/vars.tf | 2 +- .../vars/local/terragrunt.hcl | 66 ++++++++++++++--- .../vars/production/terragrunt.hcl | 72 +++++++++++++++---- .../vars/staging/terragrunt.hcl | 39 +++++----- .../vars/local/terragrunt.hcl | 47 ++++++++---- .../vars/production/terragrunt.hcl | 48 +++++++++---- .../vars/staging/terragrunt.hcl | 32 +++++---- 16 files changed, 342 insertions(+), 123 deletions(-) create mode 100644 solution/Deployment/environments/test.ps1 create mode 100644 solution/DeploymentV2/environments/vars/admz/common_vars.yaml create mode 100644 solution/DeploymentV2/environments/vars/local/common_vars.yaml create mode 100644 solution/DeploymentV2/environments/vars/production/common_vars.yaml create mode 100644 solution/DeploymentV2/environments/vars/staging/common_vars.yaml diff --git a/solution/Deployment/environments/test.ps1 b/solution/Deployment/environments/test.ps1 new file mode 100644 index 00000000..2409a0f2 --- /dev/null +++ b/solution/Deployment/environments/test.ps1 @@ -0,0 +1,25 @@ +$environment = Get-Content development.json | ConvertFrom-Json -Depth 10 + +foreach ($prop in $environment | Get-Member | Where-Object {$_.MemberType 
-eq 'NoteProperty'} ) +{ + #Write-Host $prop.Definition.ty + $property = $prop.Name + $value = $environment.$property + Write-Host $value.GetType() + if($value.GetType().Name -eq "String") + { + Write-Host "cat ../terraform_layer2/staging/vars/terragrunt.hcl | hclq set inputs.$property ""$value""" + } + else + { + if($value.GetType().Name -eq "Boolean") + { + Write-Host "cat ../terraform_layer2/staging/vars/terragrunt.hcl | hclq set inputs.$property" + $value.ToString().ToLower() + } + else + { + Write-Host "cat ../terraform_layer2/staging/vars/terragrunt.hcl | hclq set inputs.$property $value" + } + } + +} diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index bf982ebc..6343654d 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -77,16 +77,16 @@ $AddSpecificUserAsWebAppAdmin = $env:AdsGf_AddSpecificUserAsWebAppAdmin #------------------------------------------------------------------------------------------------------------ # Main Terraform #------------------------------------------------------------------------------------------------------------ -#Invoke-Expression ./Deploy_1_Infra0.ps1 +Invoke-Expression ./Deploy_1_Infra0.ps1 #------------------------------------------------------------------------------------------------------------ # Get all the outputs from terraform so we can use them in subsequent steps #Mandatory #------------------------------------------------------------------------------------------------------------ - Set-Location "./terraform" + Set-Location "./terraform_layer2" Write-Host "Reading Terraform Outputs" #Run Init Just in Case we skipped the Infra Section - $init = terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure + #$init = terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure Import-Module .\..\GatherOutputsFromTerraform.psm1 -force $tout = GatherOutputsFromTerraform $outputs = terragrunt output 
-json --terragrunt-config ./vars/$environmentName/terragrunt.hcl | ConvertFrom-Json diff --git a/solution/DeploymentV2/environments/vars/admz/common_vars.yaml b/solution/DeploymentV2/environments/vars/admz/common_vars.yaml new file mode 100644 index 00000000..bb9d67f9 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/admz/common_vars.yaml @@ -0,0 +1,10 @@ +resource_group_name: gft2 # Theresourcegroupallresourceswillbedeployedto +tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 # ThisistheAzureADtenantID +prefix: ads # Allazureresourceswillbeprefixedwiththis +domain: microsoft.com # UsedwhenconfiguringAADconfigforAzurefunctions +subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 # Theazuresubscriptionidtodeployto +resource_location: AustraliaEast # Thelocationoftheresources +owner_tag: Contoso # OwnertagvalueforAzureresources +environment_tag: stg # ThisisusedonAzuretagsaswellasallresourcenames +ip_address: 144.138.148.220 # Thisistheipaddressoftheagent/currentIP.Usedtocreatefirewallexemptions. 
+deployment_principal_layers1and3: "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" diff --git a/solution/DeploymentV2/environments/vars/local/common_vars.yaml b/solution/DeploymentV2/environments/vars/local/common_vars.yaml new file mode 100644 index 00000000..bb9d67f9 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/local/common_vars.yaml @@ -0,0 +1,10 @@ +resource_group_name: gft2 # Theresourcegroupallresourceswillbedeployedto +tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 # ThisistheAzureADtenantID +prefix: ads # Allazureresourceswillbeprefixedwiththis +domain: microsoft.com # UsedwhenconfiguringAADconfigforAzurefunctions +subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 # Theazuresubscriptionidtodeployto +resource_location: AustraliaEast # Thelocationoftheresources +owner_tag: Contoso # OwnertagvalueforAzureresources +environment_tag: stg # ThisisusedonAzuretagsaswellasallresourcenames +ip_address: 144.138.148.220 # Thisistheipaddressoftheagent/currentIP.Usedtocreatefirewallexemptions. 
+deployment_principal_layers1and3: "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" diff --git a/solution/DeploymentV2/environments/vars/production/common_vars.yaml b/solution/DeploymentV2/environments/vars/production/common_vars.yaml new file mode 100644 index 00000000..bb9d67f9 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/production/common_vars.yaml @@ -0,0 +1,10 @@ +resource_group_name: gft2 # Theresourcegroupallresourceswillbedeployedto +tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 # ThisistheAzureADtenantID +prefix: ads # Allazureresourceswillbeprefixedwiththis +domain: microsoft.com # UsedwhenconfiguringAADconfigforAzurefunctions +subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 # Theazuresubscriptionidtodeployto +resource_location: AustraliaEast # Thelocationoftheresources +owner_tag: Contoso # OwnertagvalueforAzureresources +environment_tag: stg # ThisisusedonAzuretagsaswellasallresourcenames +ip_address: 144.138.148.220 # Thisistheipaddressoftheagent/currentIP.Usedtocreatefirewallexemptions. 
+deployment_principal_layers1and3: "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.yaml b/solution/DeploymentV2/environments/vars/staging/common_vars.yaml new file mode 100644 index 00000000..bb9d67f9 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/staging/common_vars.yaml @@ -0,0 +1,10 @@ +resource_group_name: gft2 # Theresourcegroupallresourceswillbedeployedto +tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 # ThisistheAzureADtenantID +prefix: ads # Allazureresourceswillbeprefixedwiththis +domain: microsoft.com # UsedwhenconfiguringAADconfigforAzurefunctions +subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 # Theazuresubscriptionidtodeployto +resource_location: AustraliaEast # Thelocationoftheresources +owner_tag: Contoso # OwnertagvalueforAzureresources +environment_tag: stg # ThisisusedonAzuretagsaswellasallresourcenames +ip_address: 144.138.148.220 # Thisistheipaddressoftheagent/currentIP.Usedtocreatefirewallexemptions. +deployment_principal_layers1and3: "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" diff --git a/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl index 76f9a888..2825ab48 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl @@ -1,3 +1,8 @@ +locals { + common_vars = yamldecode(file("../../../environments/vars/local/common_vars.yaml")) +} + + remote_state { backend = "azurerm" generate = { @@ -7,10 +12,10 @@ remote_state { config = { # You need to update the resource group and storage account here. # You should have created these with the Prepare.ps1 script. 
- resource_group_name = "gft2" - storage_account_name = "gft2state" + resource_group_name = "${local.common_vars.resource_group_name}" + storage_account_name = "${local.common_vars.resource_group_name}state" container_name = "tstate" - key = "terraform_layer1.tfstate" + key = "terraform_layer1.tfstate" } } @@ -18,15 +23,15 @@ remote_state { # If you are deploying using pipelines, these can be overridden from environment variables # using TF_VAR_variablename inputs = { - prefix = "ads" # All azure resources will be prefixed with this - domain = "microsoft.com" # Used when configuring AAD config for Azure functions - tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID - subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure subscription id to deploy to - resource_location = "Australia East" # The location of the resources - resource_group_name = "gft2" # The resource group all resources will be deployed to - owner_tag = "Contoso" # Owner tag value for Azure resources - environment_tag = "stg" # This is used on Azure tags as well as all resource names - ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
+ prefix = "${local.common_vars.prefix}" # All azure resources will be prefixed with this + domain = "${local.common_vars.domain}" # Used when configuring AAD config for Azure functions + tenant_id = "${local.common_vars.tenant_id}" # This is the Azure AD tenant ID + subscription_id = "${local.common_vars.subscription_id}" # The azure subscription id to deploy to + resource_location = "${local.common_vars.resource_location}" # The location of the resources + resource_group_name = "${local.common_vars.resource_group_name}" # The resource group all resources will be deployed to + owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources + environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names + ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. deploy_web_app = true deploy_function_app = true -} \ No newline at end of file +} diff --git a/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl index 7224486d..6c5f1bae 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl @@ -1,3 +1,8 @@ +locals { + common_vars = yamldecode(file("../../../environments/vars/production/common_vars.yaml")) +} + + remote_state { backend = "azurerm" generate = { @@ -7,8 +12,8 @@ remote_state { config = { # You need to update the resource group and storage account here. # You should have created these with the Prepare.ps1 script. 
- resource_group_name = "gft2" - storage_account_name = "gft2state" + resource_group_name = "${local.common_vars.resource_group_name}" + storage_account_name = "${local.common_vars.resource_group_name}state" container_name = "tstate" key = "terraform_layer1.tfstate" } @@ -18,16 +23,15 @@ remote_state { # If you are deploying using pipelines, these can be overridden from environment variables # using TF_VAR_variablename inputs = { - prefix = "ads" # All azure resources will be prefixed with this - domain = "microsoft.com" # Used when configuring AAD config for Azure functions - tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID - subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure subscription id to deploy to - resource_location = "Australia East" # The location of the resources - resource_group_name = "gft2" # The resource group all resources will be deployed to - owner_tag = "Contoso" # Owner tag value for Azure resources - environment_tag = "stg" # This is used on Azure tags as well as all resource names - ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
+ prefix = "${local.common_vars.prefix}" # All azure resources will be prefixed with this + domain = "${local.common_vars.domain}" # Used when configuring AAD config for Azure functions + tenant_id = "${local.common_vars.tenant_id}" # This is the Azure AD tenant ID + subscription_id = "${local.common_vars.subscription_id}" # The azure subscription id to deploy to + resource_location = "${local.common_vars.resource_location}" # The location of the resources + resource_group_name = "${local.common_vars.resource_group_name}" # The resource group all resources will be deployed to + owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources + environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names + ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. deploy_web_app = true deploy_function_app = true -} - +} diff --git a/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl index f977573f..010d5f2a 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl @@ -1,3 +1,8 @@ +locals { + common_vars = yamldecode(file("../../../environments/vars/staging/common_vars.yaml")) +} + + remote_state { backend = "azurerm" generate = { @@ -7,8 +12,8 @@ remote_state { config = { # You need to update the resource group and storage account here. # You should have created these with the Prepare.ps1 script. 
- resource_group_name = "gft2" - storage_account_name = "gft2state" + resource_group_name = "${local.common_vars.resource_group_name}" + storage_account_name = "${local.common_vars.resource_group_name}state" container_name = "tstate" key = "terraform_layer1.tfstate" } @@ -18,15 +23,15 @@ remote_state { # If you are deploying using pipelines, these can be overridden from environment variables # using TF_VAR_variablename inputs = { - prefix = "ads" # All azure resources will be prefixed with this - domain = "microsoft.com" # Used when configuring AAD config for Azure functions - tenant_id = "72f988bf-86f1-41af-91ab-2d7cd011db47" # This is the Azure AD tenant ID - subscription_id = "035a1364-f00d-48e2-b582-4fe125905ee3" # The azure subscription id to deploy to - resource_location = "Australia East" # The location of the resources - resource_group_name = "gft2" # The resource group all resources will be deployed to - owner_tag = "Contoso" # Owner tag value for Azure resources - environment_tag = "stg" # This is used on Azure tags as well as all resource names - ip_address = "144.138.148.220" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
+ prefix = "${local.common_vars.prefix}" # All azure resources will be prefixed with this + domain = "${local.common_vars.domain}" # Used when configuring AAD config for Azure functions + tenant_id = "${local.common_vars.tenant_id}" # This is the Azure AD tenant ID + subscription_id = "${local.common_vars.subscription_id}" # The azure subscription id to deploy to + resource_location = "${local.common_vars.resource_location}" # The location of the resources + resource_group_name = "${local.common_vars.resource_group_name}" # The resource group all resources will be deployed to + owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources + environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names + ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. deploy_web_app = true deploy_function_app = true } diff --git a/solution/DeploymentV2/terraform_layer2/vars.tf b/solution/DeploymentV2/terraform_layer2/vars.tf index 0124ef49..3f523aae 100644 --- a/solution/DeploymentV2/terraform_layer2/vars.tf +++ b/solution/DeploymentV2/terraform_layer2/vars.tf @@ -839,4 +839,4 @@ variable "deployment_principal_layers1and3" { description = "Object Id of the azure account that will deploy layers 1 & 3. If it is the same as the layer 2 user then leave as empty string." 
default = "" type = string -} \ No newline at end of file +} diff --git a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl index 2e42becb..27c98df8 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl @@ -1,20 +1,63 @@ +locals { + common_vars = yamldecode(file("../../../environments/vars/local/common_vars.yaml")) +} + + +generate "layer1.tf" { + path = "layer1.tf" + if_exists = "overwrite_terragrunt" + contents = < Date: Sun, 24 Jul 2022 19:25:23 +0800 Subject: [PATCH 042/151] Separating CICD Agent and Admin User --- solution/DeploymentV2/Deploy.ps1 | 59 +++---------------- solution/DeploymentV2/Deploy_0_Prep.ps1 | 11 +--- .../DeploymentV2/Deploy_11_AdAppRoles.ps1 | 43 ++++++++++++++ solution/DeploymentV2/Deploy_1_Infra0.ps1 | 4 +- .../DeploymentV2/Deploy_2_Infra0_Outputs.ps1 | 2 +- solution/DeploymentV2/Deploy_3_Infra1.ps1 | 4 +- solution/DeploymentV2/Deploy_5_WebApp.ps1 | 37 +----------- solution/DeploymentV2/Deploy_7_MetadataDB.ps1 | 13 +++- .../{ => environments}/Sample.env | 0 .../vars/staging/LoadCommonVars.ps1 | 14 +++++ .../vars/staging/common_vars.json | 23 ++++++++ .../vars/staging/common_vars.yaml | 22 +++---- .../terraform_layer2/storage_adls.tf | 8 +++ .../terraform_layer2/storage_blob.tf | 7 +++ .../DeploymentV2/terraform_layer2/synapse.tf | 20 +++++++ .../DeploymentV2/terraform_layer2/vars.tf | 5 ++ 16 files changed, 161 insertions(+), 111 deletions(-) create mode 100644 solution/DeploymentV2/Deploy_11_AdAppRoles.ps1 rename solution/DeploymentV2/{ => environments}/Sample.env (100%) create mode 100644 solution/DeploymentV2/environments/vars/staging/LoadCommonVars.ps1 create mode 100644 solution/DeploymentV2/environments/vars/staging/common_vars.json diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 6343654d..07ea7eae 100644 --- 
a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -21,34 +21,6 @@ # You can run this script multiple times if needed. #---------------------------------------------------------------------------------------------------------------- -if ($null -eq $Env:GITHUB_ENV) - { - [Environment]::SetEnvironmentVariable("GITHUB_ENV",".\bin\GitEnv.txt") - $FileNameOnly = Split-Path $env:GITHUB_ENV -leaf - $PathOnly = Split-Path $env:GITHUB_ENV - if ((Test-Path $env:GITHUB_ENV)) - { - # Remove-Item -Path $env:GITHUB_ENV - } - else - { - - New-Item -Path $PathOnly -Name $FileNameOnly -type "file" -value "" - } - -} - -function PersistEnvVariable($Name, $Value) -{ - Write-Debug "Writing $Name to env file" - echo "$Name=$Value" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append - #Also Push Variables to the Session Env Variables for local testing - [Environment]::SetEnvironmentVariable($Name, "$Value") - -} - - - #------------------------------------------------------------------------------------------------------------ # Preparation #Mandatory #------------------------------------------------------------------------------------------------------------ @@ -59,21 +31,6 @@ $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTe Invoke-Expression ./Deploy_0_Prep.ps1 -$environmentName = [System.Environment]::GetEnvironmentVariable('environmentName') -$myIp = (Invoke-WebRequest ifconfig.me/ip).Content -$env:TF_VAR_ip_address = $myIp -$AddSpecificUserAsWebAppAdmin = $env:AdsGf_AddSpecificUserAsWebAppAdmin - - -#PersistEnvVariable $Name = "deploymentFolderPath" $Value = (Get-Location).Path -#PersistEnvVariable $Name = "gitDeploy" $Value = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') -#PersistEnvVariable $Name = "skipTerraformDeployment" $Value = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') -#PersistEnvVariable $Name = "environmentName" $Value = 
[System.Environment]::GetEnvironmentVariable('environmentName') -#PersistEnvVariable $Name = "myIp" $Value = (Invoke-WebRequest ifconfig.me/ip).Content -#PersistEnvVariable $Name = "TF_VAR_ip_address" $Value = (Invoke-WebRequest ifconfig.me/ip).Content -#PersistEnvVariable $Name = "AddSpecificUserAsWebAppAdmin" $Value = [System.Environment]::GetEnvironmentVariable('AdsGf_AddSpecificUserAsWebAppAdmin') - - #------------------------------------------------------------------------------------------------------------ # Main Terraform #------------------------------------------------------------------------------------------------------------ @@ -89,7 +46,7 @@ Invoke-Expression ./Deploy_1_Infra0.ps1 #$init = terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure Import-Module .\..\GatherOutputsFromTerraform.psm1 -force $tout = GatherOutputsFromTerraform - $outputs = terragrunt output -json --terragrunt-config ./vars/$environmentName/terragrunt.hcl | ConvertFrom-Json + $outputs = terragrunt output -json --terragrunt-config ./vars/$env:environmentName/terragrunt.hcl | ConvertFrom-Json $subscription_id =$outputs.subscription_id.value $resource_group_name =$outputs.resource_group_name.value $webapp_name =$outputs.webapp_name.value @@ -130,13 +87,13 @@ Invoke-Expression ./Deploy_1_Infra0.ps1 # Run Each SubModule #------------------------------------------------------------------------------------------------------------ #Invoke-Expression ./Deploy_3_Infra1.ps1 -#Invoke-Expression ./Deploy_4_PrivateLinks.ps1 -#Invoke-Expression ./Deploy_5_WebApp.ps1 -#Invoke-Expression ./Deploy_6_FuncApp.ps1 -#Invoke-Expression ./Deploy_7_MetadataDB.ps1 -#Invoke-Expression ./Deploy_8_SQLLogins.ps1 -#Invoke-Expression ./Deploy_9_DataFactory.ps1 -#Invoke-Expression ./Deploy_10_SampleFiles.ps1 +Invoke-Expression ./Deploy_4_PrivateLinks.ps1 +Invoke-Expression ./Deploy_5_WebApp.ps1 +Invoke-Expression ./Deploy_6_FuncApp.ps1 +Invoke-Expression ./Deploy_7_MetadataDB.ps1 
+Invoke-Expression ./Deploy_8_SQLLogins.ps1 +Invoke-Expression ./Deploy_9_DataFactory.ps1 +Invoke-Expression ./Deploy_10_SampleFiles.ps1 #---------------------------------------------------------------------------------------------------------------- # Set up Purview diff --git a/solution/DeploymentV2/Deploy_0_Prep.ps1 b/solution/DeploymentV2/Deploy_0_Prep.ps1 index b32a3d76..2001c8fa 100644 --- a/solution/DeploymentV2/Deploy_0_Prep.ps1 +++ b/solution/DeploymentV2/Deploy_0_Prep.ps1 @@ -26,14 +26,7 @@ if ($gitDeploy) { $resourceGroupName = [System.Environment]::GetEnvironmentVariable('ARM_RESOURCE_GROUP_NAME') $synapseWorkspaceName = [System.Environment]::GetEnvironmentVariable('ARM_RESOURCE_SYNAPSE_WORKSPACE_NAME') - $specificuser = [System.Environment]::GetEnvironmentVariable('specificUserIdForWebAppAdmin') - if ($specificuser -ne "") { - $AddSpecificUserAsWebAppAdmin = $true - } else { - $AddSpecificUserAsWebAppAdmin = $false - } - $env:AdsGf_AddSpecificUserAsWebAppAdmin = $AddSpecificUserAsWebAppAdmin - + $env:TF_VAR_ip_address = (Invoke-WebRequest ifconfig.me/ip).Content } else { @@ -78,6 +71,8 @@ else $environmentName = Get-SelectionFromUser -Options ('local','staging', 'admz') -Prompt "Select deployment environment" [System.Environment]::SetEnvironmentVariable('environmentName', $environmentName) } + + $env:TF_VAR_ip_address2 = (Invoke-WebRequest ifconfig.me/ip).Content } diff --git a/solution/DeploymentV2/Deploy_11_AdAppRoles.ps1 b/solution/DeploymentV2/Deploy_11_AdAppRoles.ps1 new file mode 100644 index 00000000..751f0fcd --- /dev/null +++ b/solution/DeploymentV2/Deploy_11_AdAppRoles.ps1 @@ -0,0 +1,43 @@ + + + +#---------------------------------------------------------------------------------------------------------------- +# Web App Admin User +#---------------------------------------------------------------------------------------------------------------- + 
+#---------------------------------------------------------------------------------------------------------------- +if ($gitDeploy -or $null -eq (az ad signed-in-user show) ) +{ + if ($null -ne [System.Environment]::GetEnvironmentVariable('WEB_APP_ADMIN_USER') -and [System.Environment]::GetEnvironmentVariable('WEB_APP_ADMIN_USER') -ne "") { + write-host "Adding Admin Role To WebApp for specific user" + $authapp = (az ad app show --id $tout.aad_webreg_id) | ConvertFrom-Json + $authappid = $authapp.appId + $authappobjectid = (az ad sp show --id $authapp.appId | ConvertFrom-Json).id + $body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json + $body.resourceId = $authappobjectid + $body.appRoleId = ($authapp.appRoles | Where-Object {$_.value -eq "Administrator" }).id + $body.principalId = [System.Environment]::GetEnvironmentVariable('WEB_APP_ADMIN_USER') + $body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') + + $result = az rest --method post --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body + } +} +else +{ + if ($AddCurrentUserAsWebAppAdmin) { + write-host "Adding Admin Role To WebApp" + $authapp = (az ad app show --id $tout.aad_webreg_id) | ConvertFrom-Json + $cu = az ad signed-in-user show | ConvertFrom-Json + $callinguser = $cu.id + $authappid = $authapp.appId + $authappobjectid = (az ad sp show --id $authapp.appId | ConvertFrom-Json).id + + $body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json + $body.resourceId = $authappobjectid + $body.appRoleId = ($authapp.appRoles | Where-Object {$_.value -eq "Administrator" }).id + $body.principalId = $callinguser + $body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') + + $result = az rest --method post --uri 
"https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/Deploy_1_Infra0.ps1 b/solution/DeploymentV2/Deploy_1_Infra0.ps1 index b2d9efb6..96718604 100644 --- a/solution/DeploymentV2/Deploy_1_Infra0.ps1 +++ b/solution/DeploymentV2/Deploy_1_Infra0.ps1 @@ -9,14 +9,14 @@ #------------------------------------------------------------------------------------------------------------ Set-Location "./terraform" -terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure +terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure if ($skipTerraformDeployment) { Write-Host "Skipping Terraform Deployment" } else { Write-Host "Starting Terraform Deployment" - terragrunt apply -auto-approve --terragrunt-config vars/$environmentName/terragrunt.hcl + terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl } Set-Location $deploymentFolderPath diff --git a/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 b/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 index ba4c2921..ec850304 100644 --- a/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 +++ b/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 @@ -6,7 +6,7 @@ Write-Host "Reading Terraform Outputs" Import-Module .\..\GatherOutputsFromTerraform.psm1 -force $tout = GatherOutputsFromTerraform -$outputs = terragrunt output -json --terragrunt-config ./vars/$environmentName/terragrunt.hcl | ConvertFrom-Json +$outputs = terragrunt output -json --terragrunt-config ./vars/$env:environmentName/terragrunt.hcl | ConvertFrom-Json $subscription_id =$outputs.subscription_id.value $resource_group_name =$outputs.resource_group_name.value diff --git a/solution/DeploymentV2/Deploy_3_Infra1.ps1 b/solution/DeploymentV2/Deploy_3_Infra1.ps1 index 161eaf79..c97a3c4e 100644 --- 
a/solution/DeploymentV2/Deploy_3_Infra1.ps1 +++ b/solution/DeploymentV2/Deploy_3_Infra1.ps1 @@ -9,14 +9,14 @@ else { Set-Location $deploymentFolderPath Set-Location "./terraform_custom" - terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure + terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure if ($skipTerraformDeployment) { Write-Host "Skipping Custom Terraform Deployment" } else { Write-Host "Starting Custom Terraform Deployment" - terragrunt apply -auto-approve --terragrunt-config vars/$environmentName/terragrunt.hcl + terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl } } #------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/Deploy_5_WebApp.ps1 b/solution/DeploymentV2/Deploy_5_WebApp.ps1 index 42905a29..2f87788c 100644 --- a/solution/DeploymentV2/Deploy_5_WebApp.ps1 +++ b/solution/DeploymentV2/Deploy_5_WebApp.ps1 @@ -19,42 +19,7 @@ else { $Path = $Path + "/Publish.zip" Compress-Archive -Path '.\unzipped\webapplication\*' -DestinationPath $Path -force - $result = az webapp deployment source config-zip --resource-group $resource_group_name --name $webapp_name --src $Path - if ($gitDeploy) - { - if ($AddSpecificUserAsWebAppAdmin) { - write-host "Adding Admin Role To WebApp for specific user" - $authapp = (az ad app show --id $tout.aad_webreg_id) | ConvertFrom-Json - $authappid = $authapp.appId - $authappobjectid = (az ad sp show --id $authapp.appId | ConvertFrom-Json).id - $body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json - $body.resourceId = $authappobjectid - $body.appRoleId = ($authapp.appRoles | Where-Object {$_.value -eq "Administrator" }).id - $body.principalId = $specificuser - $body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') - - $result = az rest --method post --uri 
"https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body - } - } - else - { - if ($AddCurrentUserAsWebAppAdmin) { - write-host "Adding Admin Role To WebApp" - $authapp = (az ad app show --id $tout.aad_webreg_id) | ConvertFrom-Json - $cu = az ad signed-in-user show | ConvertFrom-Json - $callinguser = $cu.id - $authappid = $authapp.appId - $authappobjectid = (az ad sp show --id $authapp.appId | ConvertFrom-Json).id - - $body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json - $body.resourceId = $authappobjectid - $body.appRoleId = ($authapp.appRoles | Where-Object {$_.value -eq "Administrator" }).id - $body.principalId = $callinguser - $body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') - - $result = az rest --method post --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body - } - } + $result = az webapp deployment source config-zip --resource-group $resource_group_name --name $webapp_name --src $Path Set-Location $deploymentFolderPath } \ No newline at end of file diff --git a/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 b/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 index 9a4ab04c..6701da32 100644 --- a/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 +++ b/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 @@ -15,7 +15,18 @@ else { #Add Ip to SQL Firewall $result = az sql server update -n $sqlserver_name -g $resource_group_name --set publicNetworkAccess="Enabled" - $result = az sql server firewall-rule create -g $resource_group_name -s $sqlserver_name -n "Deploy.ps1" --start-ip-address $myIp --end-ip-address $myIp + + $myIp = $env:TF_VAR_ip_address + $myIp2 = $env:TF_VAR_ip_address2 + + if($myIp -ne $null) + { + $result = az sql server firewall-rule create -g $resource_group_name -s 
$sqlserver_name -n "DeploymentAgent" --start-ip-address $myIp --end-ip-address $myIp + } + if($myIp2 -ne $null) + { + $result = az sql server firewall-rule create -g $resource_group_name -s $sqlserver_name -n "DeploymentUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + } #Allow Azure services and resources to access this server $result = az sql server firewall-rule create -g $resource_group_name -s $sqlserver_name -n "Azure" --start-ip-address 0.0.0.0 --end-ip-address 0.0.0.0 diff --git a/solution/DeploymentV2/Sample.env b/solution/DeploymentV2/environments/Sample.env similarity index 100% rename from solution/DeploymentV2/Sample.env rename to solution/DeploymentV2/environments/Sample.env diff --git a/solution/DeploymentV2/environments/vars/staging/LoadCommonVars.ps1 b/solution/DeploymentV2/environments/vars/staging/LoadCommonVars.ps1 new file mode 100644 index 00000000..57ffd425 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/staging/LoadCommonVars.ps1 @@ -0,0 +1,14 @@ + +#First Convert Terraform Commons to YAML +Install-Module powershell-yaml -Force +$obj = Get-Content ./common_vars.json | ConvertFrom-Json +$obj.Teraform | ConvertTo-YAML | Set-Content ./common_vars.yaml + +#Next Convert Environment to Environment Variables +$envars = $obj.Environment + +foreach($e in $envars) +{ + $Name = ($e | get-member)[-1].Name + [Environment]::SetEnvironmentVariable($Name, "$Value") +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.json b/solution/DeploymentV2/environments/vars/staging/common_vars.json new file mode 100644 index 00000000..c4eef8c7 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/staging/common_vars.json @@ -0,0 +1,23 @@ +{ + "Teraform": + { + "owner_tag": "Contoso", + "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "resource_location": "AustraliaEast", + "environment_tag": "stg", + "domain": "microsoft.com", + "subscription_id": 
"035a1364-f00d-48e2-b582-4fe125905ee3", + "prefix": "ads", + "resource_group_name": "gft2", + "ip_address": "144.138.148.220", + "ip_address2": "144.138.148.220", + "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47" + }, + "Environment": + { + "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + } +} diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.yaml b/solution/DeploymentV2/environments/vars/staging/common_vars.yaml index bb9d67f9..e92a26d6 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars.yaml +++ b/solution/DeploymentV2/environments/vars/staging/common_vars.yaml @@ -1,10 +1,12 @@ -resource_group_name: gft2 # Theresourcegroupallresourceswillbedeployedto -tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 # ThisistheAzureADtenantID -prefix: ads # Allazureresourceswillbeprefixedwiththis -domain: microsoft.com # UsedwhenconfiguringAADconfigforAzurefunctions -subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 # Theazuresubscriptionidtodeployto -resource_location: AustraliaEast # Thelocationoftheresources -owner_tag: Contoso # OwnertagvalueforAzureresources -environment_tag: stg # ThisisusedonAzuretagsaswellasallresourcenames -ip_address: 144.138.148.220 # Thisistheipaddressoftheagent/currentIP.Usedtocreatefirewallexemptions. 
-deployment_principal_layers1and3: "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" +owner_tag: Contoso +deployment_principal_layers1and3: ccbdbba4-669c-48d6-86b8-75c9ab2ee578 +resource_location: AustraliaEast +environment_tag: stg +domain: microsoft.com +subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 +prefix: ads +resource_group_name: gft2 +ip_address: 144.138.148.220 +ip_address2: 144.138.148.220 +tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 + diff --git a/solution/DeploymentV2/terraform_layer2/storage_adls.tf b/solution/DeploymentV2/terraform_layer2/storage_adls.tf index d533516d..899fdb5e 100644 --- a/solution/DeploymentV2/terraform_layer2/storage_adls.tf +++ b/solution/DeploymentV2/terraform_layer2/storage_adls.tf @@ -23,6 +23,14 @@ resource "azurerm_storage_account" "adls" { } } + +resource "azurerm_role_assignment" "adls_deployment_agent" { + count = var.deploy_adls ? 1 : 0 + scope = azurerm_storage_account.adls[0].id + role_definition_name = "Storage Blob Data Contributor" + principal_id = data.azurerm_client_config.current.object_id +} + resource "azurerm_role_assignment" "adls_function_app" { count = var.deploy_adls && var.deploy_function_app ? 1 : 0 scope = azurerm_storage_account.adls[0].id diff --git a/solution/DeploymentV2/terraform_layer2/storage_blob.tf b/solution/DeploymentV2/terraform_layer2/storage_blob.tf index 18b444e5..d1f79477 100644 --- a/solution/DeploymentV2/terraform_layer2/storage_blob.tf +++ b/solution/DeploymentV2/terraform_layer2/storage_blob.tf @@ -25,6 +25,13 @@ resource "azurerm_storage_account" "blob" { } } +resource "azurerm_role_assignment" "blob_deployment_agent" { + count = var.deploy_storage_account ? 1 : 0 + scope = azurerm_storage_account.blob[0].id + role_definition_name = "Storage Blob Data Contributor" + principal_id = data.azurerm_client_config.current.object_id +} + resource "azurerm_role_assignment" "blob_function_app" { count = var.deploy_storage_account && var.deploy_function_app ? 
1 : 0 scope = azurerm_storage_account.blob[0].id diff --git a/solution/DeploymentV2/terraform_layer2/synapse.tf b/solution/DeploymentV2/terraform_layer2/synapse.tf index b1446472..b2ff9f9f 100644 --- a/solution/DeploymentV2/terraform_layer2/synapse.tf +++ b/solution/DeploymentV2/terraform_layer2/synapse.tf @@ -128,6 +128,14 @@ resource "azurerm_synapse_firewall_rule" "cicd" { end_ip_address = var.ip_address } +resource "azurerm_synapse_firewall_rule" "cicd_user" { + count = var.deploy_adls && var.deploy_synapse ? 1 : 0 + name = "AllowCICDUser" + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + start_ip_address = var.ip_address2 + end_ip_address = var.ip_address2 +} + # -------------------------------------------------------------------------------------------------------------------- # Synapse Workspace Firewall Rules (Allow Public Access) # -------------------------------------------------------------------------------------------------------------------- @@ -159,6 +167,18 @@ resource "azurerm_synapse_role_assignment" "synapse_function_app_assignment" { } +resource "azurerm_synapse_role_assignment" "synapse_adminuser_assignment" { + count = var.deploy_synapse ? 1 : 0 + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + role_name = "Synapse Administrator" + principal_id = var.deployment_principal_layers1and3 + depends_on = [ + azurerm_synapse_firewall_rule.public_access, + time_sleep.azurerm_synapse_firewall_rule_wait_30_seconds_cicd + ] + +} + resource "azurerm_synapse_linked_service" "synapse_keyvault_linkedservice" { count = var.deploy_synapse ? 
1 : 0 name = "SLS_AzureKeyVault" diff --git a/solution/DeploymentV2/terraform_layer2/vars.tf b/solution/DeploymentV2/terraform_layer2/vars.tf index 3f523aae..a05aa5fd 100644 --- a/solution/DeploymentV2/terraform_layer2/vars.tf +++ b/solution/DeploymentV2/terraform_layer2/vars.tf @@ -6,6 +6,11 @@ variable "ip_address" { type = string } +variable "ip_address2" { + description = "The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets" + type = string +} + variable "tenant_id" { description = "The AAD tenant ID" type = string From 576a6725356d101c06db4894413234c3ac1a472d Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 26 Jul 2022 15:00:08 +0800 Subject: [PATCH 043/151] Added notion of Environments to simplify transition from console deployment to agent deployment --- .devcontainer/Dockerfile | 6 +- .github/workflows/continuous-delivery.yml | 12 +- .../Patterns/GatherOutputsFromTerraform.psm1 | 3 +- solution/DeploymentV2/Deploy.ps1 | 26 +- solution/DeploymentV2/Deploy_0_Prep.ps1 | 50 +-- solution/DeploymentV2/Deploy_1_Infra0.ps1 | 46 ++- solution/DeploymentV2/Prepare.ps1 | 202 ++-------- .../featuretemplates/basic_deployment.jsonc | 22 ++ .../featuretemplates/full_deployment.jsonc | 82 ++++ .../full_deployment_no_purview.jsonc | 82 ++++ .../featuretemplates/functional_tests.jsonc | 82 ++++ .../vars/PreprocessEnvironment.ps1 | 55 +++ .../vars/admz/common_vars_template.jsonnet | 230 ++++++++++++ .../vars/admz/common_vars_values.jsonc | 19 + .../vars/common_vars_template.jsonnet | 252 +++++++++++++ .../vars/local/common_vars_template.jsonnet | 230 ++++++++++++ .../vars/local/common_vars_values.jsonc | 19 + .../production/common_vars_template.jsonnet | 230 ++++++++++++ .../vars/production/common_vars_values.jsonc | 19 + .../vars/staging/GetSecretsTemplate.env | 27 ++ .../vars/staging/LoadCommonVars.ps1 | 14 - .../vars/staging/common_vars.json | 349 ++++++++++++++++-- .../vars/staging/common_vars.yaml | 16 +- 
.../vars/staging/common_vars_template.jsonnet | 246 ++++++++++++ .../vars/staging/common_vars_values.jsonc | 19 + .../terraform/vars/staging/terragrunt.hcl | 27 +- .../DeploymentV2/terraform_layer1/vars.tf | 1 - .../DeploymentV2/terraform_layer2/locals.tf | 1 - .../DeploymentV2/terraform_layer2/synapse.tf | 12 +- .../DeploymentV2/terraform_layer2/vars.tf | 7 + .../vars/local/terragrunt.hcl | 1 + .../vars/production/terragrunt.hcl | 1 + .../vars/staging/terragrunt.hcl | 1 + 33 files changed, 2089 insertions(+), 300 deletions(-) create mode 100644 solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc create mode 100644 solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc create mode 100644 solution/DeploymentV2/environments/featuretemplates/full_deployment_no_purview.jsonc create mode 100644 solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc create mode 100644 solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 create mode 100644 solution/DeploymentV2/environments/vars/admz/common_vars_template.jsonnet create mode 100644 solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc create mode 100644 solution/DeploymentV2/environments/vars/common_vars_template.jsonnet create mode 100644 solution/DeploymentV2/environments/vars/local/common_vars_template.jsonnet create mode 100644 solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc create mode 100644 solution/DeploymentV2/environments/vars/production/common_vars_template.jsonnet create mode 100644 solution/DeploymentV2/environments/vars/production/common_vars_values.jsonc create mode 100644 solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env delete mode 100644 solution/DeploymentV2/environments/vars/staging/LoadCommonVars.ps1 create mode 100644 solution/DeploymentV2/environments/vars/staging/common_vars_template.jsonnet create mode 100644 
solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 56f4a4c5..0e6814a2 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -36,7 +36,11 @@ RUN bash /tmp/library-scripts/common-debian.sh "${INSTALL_ZSH}" "${USERNAME}" "$ && wget https://github.com/gruntwork-io/terragrunt/releases/download/v0.35.14/terragrunt_linux_amd64 \ && mv terragrunt_linux_amd64 terragrunt \ && chmod u+x terragrunt \ - && mv terragrunt /usr/local/bin/terragrunt + && mv terragrunt /usr/local/bin/terragrunt + && curl -sSLo install.sh https://install.hclq.sh + && sh install.sh + && rm install.sh + # [Optional] Uncomment this section to install additional OS packages. diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 3935e404..b2c2644e 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,7 +3,7 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: feature-1.0.4 + branches: $default-branch jobs: deploy-to-env-one: @@ -14,9 +14,9 @@ jobs: environmentName: staging gitDeploy : true skipTerraformDeployment: false - specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} + WEB_APP_ADMIN_USER: ${{ secrets.WEB_APP_ADMIN_USER }} keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} - synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} + ARM_SYNAPSE_WORKSPACE_NAME: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} # Required for Terraform ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} @@ -90,7 +90,7 @@ jobs: working-directory: ./solution/DeploymentV2/terraform run: | az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 - az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} 
--start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.synapseWorkspaceName }} + az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.ARM_SYNAPSE_WORKSPACE_NAME }} az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} - name: Install Jsonnet @@ -122,9 +122,9 @@ jobs: environmentName: production gitDeploy : true skipTerraformDeployment: false - specificUserIdForWebAppAdmin: ${{ secrets.WEB_APP_ADMIN_USER }} + WEB_APP_ADMIN_USER: ${{ secrets.WEB_APP_ADMIN_USER }} keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} - synapseWorkspaceName: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} + ARM_SYNAPSE_WORKSPACE_NAME: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} # Required for Terraform ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} diff --git a/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 b/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 index 405318d3..08a5d2d2 100644 --- a/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 +++ b/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 @@ -1,7 +1,6 @@ function GatherOutputsFromTerraform() { - $environmentName = $env:TFenvironmentName - $environmentName = "local" # currently supports (local, staging) + $environmentName = $env:TFenvironmentName $myIp = (Invoke-WebRequest ifconfig.me/ip).Content $CurrentFolderPath = $PWD diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 07ea7eae..61c11378 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -20,11 +20,17 @@ # # You can run this script multiple times if needed. 
#---------------------------------------------------------------------------------------------------------------- - +param ( + [Parameter(Mandatory=$false)] + [bool]$RunTerraformLayer1=$false, + [Parameter(Mandatory=$false)] + [bool]$RunTerraformLayer2=$false, + [Parameter(Mandatory=$false)] + [bool]$RunTerraformLayer3=$false +) #------------------------------------------------------------------------------------------------------------ # Preparation #Mandatory #------------------------------------------------------------------------------------------------------------ - $deploymentFolderPath = (Get-Location).Path $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') @@ -34,7 +40,7 @@ Invoke-Expression ./Deploy_0_Prep.ps1 #------------------------------------------------------------------------------------------------------------ # Main Terraform #------------------------------------------------------------------------------------------------------------ -Invoke-Expression ./Deploy_1_Infra0.ps1 +Invoke-Expression ./Deploy_1_Infra0.ps1 -RunTerraformLayer1 $RunTerraformLayer1 -RunTerraformLayer2 $RunTerraformLayer2 -RunTerraformLayer3 $RunTerraformLayer3 #------------------------------------------------------------------------------------------------------------ @@ -87,13 +93,13 @@ Invoke-Expression ./Deploy_1_Infra0.ps1 # Run Each SubModule #------------------------------------------------------------------------------------------------------------ #Invoke-Expression ./Deploy_3_Infra1.ps1 -Invoke-Expression ./Deploy_4_PrivateLinks.ps1 -Invoke-Expression ./Deploy_5_WebApp.ps1 -Invoke-Expression ./Deploy_6_FuncApp.ps1 -Invoke-Expression ./Deploy_7_MetadataDB.ps1 -Invoke-Expression ./Deploy_8_SQLLogins.ps1 -Invoke-Expression ./Deploy_9_DataFactory.ps1 -Invoke-Expression ./Deploy_10_SampleFiles.ps1 +#Invoke-Expression 
./Deploy_4_PrivateLinks.ps1 +#Invoke-Expression ./Deploy_5_WebApp.ps1 +#Invoke-Expression ./Deploy_6_FuncApp.ps1 +#Invoke-Expression ./Deploy_7_MetadataDB.ps1 +#Invoke-Expression ./Deploy_8_SQLLogins.ps1 +#Invoke-Expression ./Deploy_9_DataFactory.ps1 +#Invoke-Expression ./Deploy_10_SampleFiles.ps1 #---------------------------------------------------------------------------------------------------------------- # Set up Purview diff --git a/solution/DeploymentV2/Deploy_0_Prep.ps1 b/solution/DeploymentV2/Deploy_0_Prep.ps1 index 2001c8fa..5132741d 100644 --- a/solution/DeploymentV2/Deploy_0_Prep.ps1 +++ b/solution/DeploymentV2/Deploy_0_Prep.ps1 @@ -30,53 +30,25 @@ if ($gitDeploy) } else { - function Get-SelectionFromUser { - param ( - [Parameter(Mandatory=$true)] - [string[]]$Options, - [Parameter(Mandatory=$true)] - [string]$Prompt - ) - - [int]$Response = 0; - [bool]$ValidResponse = $false - - while (!($ValidResponse)) { - [int]$OptionNo = 0 - - Write-Host $Prompt -ForegroundColor DarkYellow - Write-Host "[0]: Quit" - - foreach ($Option in $Options) { - $OptionNo += 1 - Write-Host ("[$OptionNo]: {0}" -f $Option) - } - - if ([Int]::TryParse((Read-Host), [ref]$Response)) { - if ($Response -eq 0) { - return '' - } - elseif($Response -le $OptionNo) { - $ValidResponse = $true - } - } - } - - return $Options.Get($Response - 1) - } - + #Only Prompt if Environment Variable has not been set if ($null -eq [System.Environment]::GetEnvironmentVariable('environmentName')) - { - $environmentName = Get-SelectionFromUser -Options ('local','staging', 'admz') -Prompt "Select deployment environment" + { + $envlist = (Get-ChildItem -Directory -Path ./environments/vars | Select-Object -Property Name).Name + Import-Module ./pwshmodules/GetSelectionFromUser.psm1 -Force + $environmentName = Get-SelectionFromUser -Options ($envlist) -Prompt "Select deployment environment" [System.Environment]::SetEnvironmentVariable('environmentName', $environmentName) } $env:TF_VAR_ip_address2 = 
(Invoke-WebRequest ifconfig.me/ip).Content + + #Re-process Environment Config Files. + Set-Location ./environments/vars/ + ./PreprocessEnvironment.ps1 -Environment $environmentName + Set-Location $deploymentFolderPath + } - - $environmentName = [System.Environment]::GetEnvironmentVariable('environmentName') $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') diff --git a/solution/DeploymentV2/Deploy_1_Infra0.ps1 b/solution/DeploymentV2/Deploy_1_Infra0.ps1 index 96718604..7e62eb30 100644 --- a/solution/DeploymentV2/Deploy_1_Infra0.ps1 +++ b/solution/DeploymentV2/Deploy_1_Infra0.ps1 @@ -1,3 +1,12 @@ +param ( + [Parameter(Mandatory=$false)] + [System.Boolean]$RunTerraformLayer1=$false, + [Parameter(Mandatory=$false)] + [System.Boolean]$RunTerraformLayer2=$false, + [Parameter(Mandatory=$false)] + [System.Boolean]$RunTerraformLayer3=$false +) + #---------------------------------------------------------------------------------------------------------------- # Deploy Infrastructure #---------------------------------------------------------------------------------------------------------------- @@ -7,15 +16,44 @@ # - If the firewall is blocking you, add your IP as firewall rule / exemption to the appropriate resource # - If you havn't run prepare but want to run this script on its own, set the TF_VAR_jumphost_password and TF_VAR_domain env vars #------------------------------------------------------------------------------------------------------------ -Set-Location "./terraform" + +Set-Location "./terraform_layer1" terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure -if ($skipTerraformDeployment) { - Write-Host "Skipping Terraform Deployment" +if ($skipTerraformDeployment -or $RunTerraformLayer1 -ne $true) { + Write-Host "Skipping Terraform Deployment - Layer 1" } else { - Write-Host "Starting Terraform Deployment" + Write-Host "Starting Terraform Deployment- Layer 1" + 
terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl +} + +Set-Location $deploymentFolderPath + +Set-Location "./terraform_layer2" + +terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure + +if ($skipTerraformDeployment -or $RunTerraformLayer2 -ne $true) { + Write-Host "Skipping Terraform Deployment- Layer 2" +} +else { + Write-Host "Starting Terraform Deployment- Layer 2" + terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl +} + +Set-Location $deploymentFolderPath + +Set-Location "./terraform_layer3" + +terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure + +if ($skipTerraformDeployment -or $RunTerraformLayer3 -ne $true) { + Write-Host "Skipping Terraform Deployment- Layer 3" +} +else { + Write-Host "Starting Terraform Deployment- Layer 3" terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl } diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index 02a7d4ab..6536b3e3 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -22,9 +22,12 @@ # Once this script has finished, you then run Deploy.ps1 to create your environment # ------------------------------------------------------------------------------------------------------------ + #by default $gitDeploy will not be true, only being set by the git environment - meaning if not using a runner it will default to a standard execution. 
$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +$envlist = (Get-ChildItem -Directory -Path ./environments/vars | Select-Object -Property Name).Name + if ($gitDeploy) { Write-Host "Git Deployment" @@ -35,44 +38,9 @@ if ($gitDeploy) else { Write-Host "Standard Deployment" - function Get-SelectionFromUser { - param ( - [Parameter(Mandatory=$true)] - [string[]]$Options, - [Parameter(Mandatory=$true)] - [string]$Prompt - ) - - [int]$Response = 0; - [bool]$ValidResponse = $false - - while (!($ValidResponse)) { - [int]$OptionNo = 0 - - Write-Host $Prompt -ForegroundColor DarkYellow - Write-Host "[0]: Quit" - - foreach ($Option in $Options) { - $OptionNo += 1 - Write-Host ("[$OptionNo]: {0}" -f $Option) - } - - if ([Int]::TryParse((Read-Host), [ref]$Response)) { - if ($Response -eq 0) { - return '' - } - elseif($Response -le $OptionNo) { - $ValidResponse = $true - } - } - } - - return $Options.Get($Response - 1) - } - - $environmentName = Get-SelectionFromUser -Options ('local','staging', 'admz') -Prompt "Select deployment environment" + Import-Module ./pwshmodules/GetSelectionFromUser.psm1 -Force + $environmentName = Get-SelectionFromUser -Options ($envlist) -Prompt "Select deployment environment" [System.Environment]::SetEnvironmentVariable('environmentName', $environmentName) - } @@ -108,11 +76,11 @@ if ($gitDeploy) az storage container create --name tstate --account-name $stateStorageName --auth-mode login } else -{ +{ $environmentFile = "./EnvironmentTemplate_" + $environmentName + ".hcl" $environmentFileContents = Get-Content $environmentFile $env:TF_VAR_resource_group_name = Read-Host "Enter the name of the resource group to create (enter to skip)" - $env:TF_VAR_storage_account_name = Read-Host "Enter a unique name for the terraform state storage account (enter to skip)" + $env:TF_VAR_storage_account_name = $env:TF_VAR_resource_group_name+"state" $CONTAINER_NAME="tstate" # 
------------------------------------------------------------------------------------------------------------ # Ensure that you have all of the require Azure resource providers enabled before you begin deploying the solution. @@ -240,7 +208,7 @@ else #------------------------------------------------------------------------------------------------------------ # Persist into relevant environment file #------------------------------------------------------------------------------------------------------------ - $PersistEnv = Get-SelectionFromUser -Options ('Yes','No') -Prompt "Do you want to automatically persist the configuration information into your environment file? WARNING this will overwrite your existing hcl file." + $PersistEnv = Get-SelectionFromUser -Options ('Yes','No') -Prompt "Do you want to automatically persist the configuration information into the files in your environment folder? WARNING this will overwrite your existing configurations." if ($PersistEnv -eq "Quit") { ## Changed so the prepare does not close if you do not wish to persist. 
@@ -250,20 +218,32 @@ else if ($PersistEnv -eq "Yes") { - $environmentFileTarget = "./terraform/vars/" + $environmentName.ToLower() + "/terragrunt.hcl" - - $environmentFileContents = $environmentFileContents.Replace("{prefix}","ads") - - $environmentFileContents = $environmentFileContents.Replace("{resource_group_name}","$env:TF_VAR_resource_group_name") - $environmentFileContents = $environmentFileContents.Replace("{storage_account_name}","$env:TF_VAR_storage_account_name") - $environmentFileContents = $environmentFileContents.Replace("{subscription_id}","$env:TF_VAR_subscription_id") - $environmentFileContents = $environmentFileContents.Replace("{tenant_id}","$env:TF_VAR_tenant_id") - $environmentFileContents = $environmentFileContents.Replace("{ip_address}","$env:TF_VAR_ip_address") - $environmentFileContents = $environmentFileContents.Replace("{domain}","$env:TF_VAR_domain") - $environmentFileContents = $environmentFileContents.Replace("{publish_sif_database}","true") + $common_vars_values = Get-Content ./environments/vars/$environmentName/common_vars_values.jsonc | ConvertFrom-Json -Depth 10 + $common_vars_values.resource_group_name = $env:TF_VAR_resource_group_name + $common_vars_values.domain = $env:TF_VAR_domain + $common_vars_values.subscription_id = $env:TF_VAR_subscription_id + $common_vars_values.ip_address2 = $env:TF_VAR_ip_address + $common_vars_values.tenant_id = $env:TF_VAR_tenant_id + $common_vars_values.WEB_APP_ADMIN_USER = (az ad signed-in-user show --query id -o tsv) + $common_vars_values.deployment_principal_layers1and3 = $common_vars_values.WEB_APP_ADMIN_USER + $common_vars_values.synapse_administrators = $common_vars_values.deployment_principal_layers1and3 + $foundUser = $false + foreach($u in $common_vars_values.synapse_administrators) + { + if ($u.(($u | Get-Member)[-1].Name) -eq ($common_vars_values.WEB_APP_ADMIN_USER)) + { + $foundUser = $true + break + } + } + if($foundUser -eq $false) + { + $common_vars_values.synapse_administrators | 
Add-Member -Name $common_vars_values.WEB_APP_ADMIN_USER -Value $common_vars_values.WEB_APP_ADMIN_USER -Type NoteProperty + } + $fts = (Get-ChildItem -Path ./environments/featuretemplates | Select-Object -Property Name).Name.replace(".jsonc","") #------------------------------------------------------------------------------------------------------------ # Templated Configurations #------------------------------------------------------------------------------------------------------------ @@ -271,126 +251,16 @@ else { Exit } - $templateName = Get-SelectionFromUser -Options ('Minimal-NoVNET,No Purview, No Synapse','Full-AllFeatures','FunctionalTests-NoVNET,No Purview, No Synapse, Includes SQL IAAS', 'Lockbox Light No Vnet - No FuncApp,WebApp,MetadataDB,Synapse,ADF Pipelines', 'Lockbox Light Including Vnet & Networking') -Prompt "Select deployment fast start template" + $templateName = Get-SelectionFromUser -Options ($fts) -Prompt "Select deployment fast start template" if ($templateName -eq "Quit") { Exit } - if ($templateName -eq "Full-AllFeatures") - { - $environmentFileContents = $environmentFileContents.Replace("{deploy_web_app}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_function_app}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_custom_terraform}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_sentinel}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_purview}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_synapse}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_metadata_database}","true") - $environmentFileContents = $environmentFileContents.Replace("{is_vnet_isolated}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_web_app}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_function_app}","true") - 
$environmentFileContents = $environmentFileContents.Replace("{publish_sample_files}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_metadata_database}","true") - $environmentFileContents = $environmentFileContents.Replace("{configure_networking}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_datafactory_pipelines}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_web_app_addcurrentuserasadmin}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_selfhostedsql}","false") - $environmentFileContents = $environmentFileContents.Replace("{is_onprem_datafactory_ir_registered}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_app_service_plan}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_data_factory}","true") - } - - if ($templateName -eq "Minimal-NoVNET,No Purview, No Synapse") - { - $environmentFileContents = $environmentFileContents.Replace("{deploy_web_app}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_function_app}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_custom_terraform}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_sentinel}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_purview}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_synapse}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_metadata_database}","true") - $environmentFileContents = $environmentFileContents.Replace("{is_vnet_isolated}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_web_app}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_function_app}","true") - $environmentFileContents = 
$environmentFileContents.Replace("{publish_sample_files}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_metadata_database}","true") - $environmentFileContents = $environmentFileContents.Replace("{configure_networking}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_datafactory_pipelines}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_web_app_addcurrentuserasadmin}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_selfhostedsql}","false") - $environmentFileContents = $environmentFileContents.Replace("{is_onprem_datafactory_ir_registered}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_app_service_plan}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_data_factory}","true") - } - - if ($templateName -eq "FunctionalTests-NoVNET,No Purview, No Synapse, Includes SQL IAAS") - { - $environmentFileContents = $environmentFileContents.Replace("{deploy_web_app}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_function_app}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_custom_terraform}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_sentinel}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_purview}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_synapse}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_metadata_database}","true") - $environmentFileContents = $environmentFileContents.Replace("{is_vnet_isolated}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_web_app}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_function_app}","true") - $environmentFileContents = 
$environmentFileContents.Replace("{publish_sample_files}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_metadata_database}","true") - $environmentFileContents = $environmentFileContents.Replace("{configure_networking}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_datafactory_pipelines}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_web_app_addcurrentuserasadmin}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_selfhostedsql}","true") - $environmentFileContents = $environmentFileContents.Replace("{is_onprem_datafactory_ir_registered}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_app_service_plan}","true") - $environmentFileContents = $environmentFileContents.Replace("{deploy_data_factory}","true") - } - - if ($templateName -eq "Lockbox Light No Vnet - No FuncApp,WebApp,MetadataDB,Synapse,ADF Pipelines") - { - $environmentFileContents = $environmentFileContents.Replace("{deploy_web_app}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_function_app}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_custom_terraform}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_sentinel}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_purview}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_synapse}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_metadata_database}","false") - $environmentFileContents = $environmentFileContents.Replace("{is_vnet_isolated}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_web_app}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_function_app}","false") - $environmentFileContents = 
$environmentFileContents.Replace("{publish_sample_files}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_metadata_database}","false") - $environmentFileContents = $environmentFileContents.Replace("{configure_networking}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_datafactory_pipelines}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_web_app_addcurrentuserasadmin}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_selfhostedsql}","false") - $environmentFileContents = $environmentFileContents.Replace("{is_onprem_datafactory_ir_registered}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_app_service_plan}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_data_factory}","true") - } - - if ($templateName -eq "Lockbox Light Including Vnet & Networking") - { - $environmentFileContents = $environmentFileContents.Replace("{deploy_web_app}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_function_app}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_custom_terraform}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_sentinel}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_purview}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_synapse}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_metadata_database}","false") - $environmentFileContents = $environmentFileContents.Replace("{is_vnet_isolated}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_web_app}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_function_app}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_sample_files}","false") - 
$environmentFileContents = $environmentFileContents.Replace("{publish_metadata_database}","false") - $environmentFileContents = $environmentFileContents.Replace("{configure_networking}","true") - $environmentFileContents = $environmentFileContents.Replace("{publish_datafactory_pipelines}","false") - $environmentFileContents = $environmentFileContents.Replace("{publish_web_app_addcurrentuserasadmin}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_selfhostedsql}","false") - $environmentFileContents = $environmentFileContents.Replace("{is_onprem_datafactory_ir_registered}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_app_service_plan}","false") - $environmentFileContents = $environmentFileContents.Replace("{deploy_data_factory}","true") - } + Set-Location ./environments/vars/ + ./PreprocessEnvironment.ps1 -Environment $environmentName -FeatureTemplate $templateName + Set-Location $deploymentFolderPath + $environmentFileContents | Set-Content $environmentFileTarget diff --git a/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc b/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc new file mode 100644 index 00000000..eb52c8bb --- /dev/null +++ b/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc @@ -0,0 +1,22 @@ +{ +"deploy_web_app":true, +"deploy_function_app":true, +"deploy_custom_terraform":false, +"deploy_app_service_plan":true, +"deploy_data_factory":true, +"deploy_sentinel":true, +"deploy_purview":true, +"deploy_synapse":true, +"deploy_metadata_database":true, +"is_vnet_isolated":true, +"publish_web_app":true, +"publish_function_app":true, +"publish_sample_files":true, +"publish_metadata_database":true, +"configure_networking":true, +"publish_datafactory_pipelines":true, +"publish_web_app_addcurrentuserasadmin":true, +"deploy_selfhostedsql":false, +"is_onprem_datafactory_ir_registered":false, +"publish_sif_database":true +} \ 
No newline at end of file diff --git a/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc b/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc new file mode 100644 index 00000000..a10a63a9 --- /dev/null +++ b/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc @@ -0,0 +1,82 @@ +[ + { + "Name": "deploy_web_app", + "Value": true + }, + { + "Name": "deploy_function_app", + "Value": true + }, + { + "Name": "deploy_custom_terraform", + "Value": false + }, + { + "Name": "deploy_app_service_plan", + "Value": true + }, + { + "Name": "deploy_data_factory", + "Value": true + }, + { + "Name": "deploy_sentinel", + "Value": true + }, + { + "Name": "deploy_purview", + "Value": true + }, + { + "Name": "deploy_synapse", + "Value": true + }, + { + "Name": "deploy_metadata_database", + "Value": true + }, + { + "Name": "is_vnet_isolated", + "Value": true + }, + { + "Name": "publish_web_app", + "Value": true + }, + { + "Name": "publish_function_app", + "Value": true + }, + { + "Name": "publish_sample_files", + "Value": true + }, + { + "Name": "publish_metadata_database", + "Value": true + }, + { + "Name": "configure_networking", + "Value": true + }, + { + "Name": "publish_datafactory_pipelines", + "Value": true + }, + { + "Name": "publish_web_app_addcurrentuserasadmin", + "Value": true + }, + { + "Name": "deploy_selfhostedsql", + "Value": false + }, + { + "Name": "is_onprem_datafactory_ir_registered", + "Value": false + }, + { + "Name": "publish_sif_database", + "Value": true + } +] \ No newline at end of file diff --git a/solution/DeploymentV2/environments/featuretemplates/full_deployment_no_purview.jsonc b/solution/DeploymentV2/environments/featuretemplates/full_deployment_no_purview.jsonc new file mode 100644 index 00000000..50990b21 --- /dev/null +++ b/solution/DeploymentV2/environments/featuretemplates/full_deployment_no_purview.jsonc @@ -0,0 +1,82 @@ +[ + { + "Name": "deploy_web_app", + "Value": true + }, + { + 
"Name": "deploy_function_app", + "Value": true + }, + { + "Name": "deploy_custom_terraform", + "Value": false + }, + { + "Name": "deploy_app_service_plan", + "Value": true + }, + { + "Name": "deploy_data_factory", + "Value": true + }, + { + "Name": "deploy_sentinel", + "Value": true + }, + { + "Name": "deploy_purview", + "Value": false + }, + { + "Name": "deploy_synapse", + "Value": true + }, + { + "Name": "deploy_metadata_database", + "Value": true + }, + { + "Name": "is_vnet_isolated", + "Value": true + }, + { + "Name": "publish_web_app", + "Value": true + }, + { + "Name": "publish_function_app", + "Value": true + }, + { + "Name": "publish_sample_files", + "Value": true + }, + { + "Name": "publish_metadata_database", + "Value": true + }, + { + "Name": "configure_networking", + "Value": true + }, + { + "Name": "publish_datafactory_pipelines", + "Value": true + }, + { + "Name": "publish_web_app_addcurrentuserasadmin", + "Value": true + }, + { + "Name": "deploy_selfhostedsql", + "Value": false + }, + { + "Name": "is_onprem_datafactory_ir_registered", + "Value": false + }, + { + "Name": "publish_sif_database", + "Value": true + } +] \ No newline at end of file diff --git a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc new file mode 100644 index 00000000..a10a63a9 --- /dev/null +++ b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc @@ -0,0 +1,82 @@ +[ + { + "Name": "deploy_web_app", + "Value": true + }, + { + "Name": "deploy_function_app", + "Value": true + }, + { + "Name": "deploy_custom_terraform", + "Value": false + }, + { + "Name": "deploy_app_service_plan", + "Value": true + }, + { + "Name": "deploy_data_factory", + "Value": true + }, + { + "Name": "deploy_sentinel", + "Value": true + }, + { + "Name": "deploy_purview", + "Value": true + }, + { + "Name": "deploy_synapse", + "Value": true + }, + { + "Name": "deploy_metadata_database", 
+ "Value": true + }, + { + "Name": "is_vnet_isolated", + "Value": true + }, + { + "Name": "publish_web_app", + "Value": true + }, + { + "Name": "publish_function_app", + "Value": true + }, + { + "Name": "publish_sample_files", + "Value": true + }, + { + "Name": "publish_metadata_database", + "Value": true + }, + { + "Name": "configure_networking", + "Value": true + }, + { + "Name": "publish_datafactory_pipelines", + "Value": true + }, + { + "Name": "publish_web_app_addcurrentuserasadmin", + "Value": true + }, + { + "Name": "deploy_selfhostedsql", + "Value": false + }, + { + "Name": "is_onprem_datafactory_ir_registered", + "Value": false + }, + { + "Name": "publish_sif_database", + "Value": true + } +] \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 new file mode 100644 index 00000000..6d71ea8a --- /dev/null +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -0,0 +1,55 @@ +<# + +This script processes common_vars_values.jsonc in the selected folder + +#> +param ( + [Parameter(Mandatory=$true)] + [string]$Environment="staging", + [Parameter(Mandatory=$true)] + [string]$FeatureTemplate="full_deployment" +) + +$Environment = $Environment.ToLower() + +#First Convert Terraform Commons to YAML +Install-Module powershell-yaml -Force +$GithubEnvTemplate = "" + +Write-Host "Preparing Environment: $Environment" + +(jsonnet "./common_vars_template.jsonnet" --tla-str featuretemplatename=$FeatureTemplate --tla-str environment=$Environment ) | Set-Content("./$Environment/common_vars.json") +$obj = Get-Content ./$Environment/common_vars.json | ConvertFrom-Json +$HCLYaml = @{} +foreach($t in $obj.Variables) +{ + $Value = $t.Value + if($t.EnvVarName -ne "") + { + $Name = $t.EnvVarName + if([string]::IsNullOrEmpty($Value) -eq $false -and $Value -ne '#####') + { + [Environment]::SetEnvironmentVariable($Name, "$Value") + } + } + + 
if($t.CICDSecretName -ne "") + { + $Name = $t.CICDSecretName + #Add to GitHubSecretFile + $GithubEnvTemplate = $GithubEnvTemplate + "$Name=$Value" + [System.Environment]::NewLine + } + + if($t.HCLName -ne "") + { + $Name = $t.HCLName + #Add to CommonVars.yaml + $HCLYAML.$Name = $Value + } + +} + +#Write the Terraform Element common_vars.yaml - this is then injected into the hcl file +$HCLYAML | ConvertTo-YAML | Set-Content ./$Environment/common_vars.yaml +#Write the Git Secrets to the Git Template .env +$GithubEnvTemplate|Set-Content ./$Environment/GetSecretsTemplate.env \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/admz/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/admz/common_vars_template.jsonnet new file mode 100644 index 00000000..2c5ee212 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/admz/common_vars_template.jsonnet @@ -0,0 +1,230 @@ +function () +local locals = import './common_vars_values.jsonc'; + +{ + "Variables": [ + /*Attributes: + CICDSecretName: Name of the Secret that will hold the value in CICD. This mapps to the Env section of the CICD yaml", + EnvVarName: Name to be used when creating local environment Variable if this is blank no local environment variable will be created + HCLName: Name to be used when common_vars.yaml which is injected into HCL file. If this is blank it will not be included in HCL + */ + /* + Environment Only Vars + - these are not used in local console based deployments. 
Only when runnning in git hub + */ + { + "CICDSecretName": "WEB_APP_ADMIN_USER", + "EnvVarName": "WEB_APP_ADMIN_USER", + "HCLName": "", + "Value": locals.WEB_APP_ADMIN_USER + }, + { + "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", + "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", + "HCLName": "", + "Value": locals.ARM_SYNAPSE_WORKSPACE_NAME + }, + { + "CICDSecretName": "ARM_KEYVAULT_NAME", + "EnvVarName": "keyVaultName", + "HCLName": "", + "Value": locals.ARM_KEYVAULT_NAME + }, + { + "CICDSecretName": "ARM_DATALAKE_NAME", + "EnvVarName": "datalakeName", + "HCLName": "", + "Value": locals.ARM_DATALAKE_NAME + }, + /* + Required for Automated CICD Deployment + */ + { + "CICDSecretName": "ARM_CLIENT_ID", + "EnvVarName": "ARM_CLIENT_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_PAL_PARTNER_ID", + "EnvVarName": "ARM_PAL_PARTNER_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_CLIENT_SECRET", + "EnvVarName": "ARM_CLIENT_SECRET", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SUBSCRIPTION_ID", + "EnvVarName": "ARM_SUBSCRIPTION_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_TENANT_ID", + "EnvVarName": "ARM_TENANT_ID", + "HCLName": "tenant_id", + "Value": locals.tenant_id + }, + + /* + HCL Common Vars & Terraform Customisations + */ + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "owner_tag", + "Value": locals.owner_tag + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "deployment_principal_layers1and3", + "Value": locals.deployment_principal_layers1and3 + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "resource_location", + "Value": locals.resource_location + }, + { + "CICDSecretName": "ENVIRONMENT_TAG", + "EnvVarName": "TF_VAR_environment_tag", + "HCLName": "environment_tag", + "Value": locals.environment_tag + }, + { + "CICDSecretName": "ARM_DOMAIN", + "EnvVarName": "TF_VAR_domain", + "HCLName": "domain", + "Value": 
locals.domain + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "subscription_id", + "Value": locals.subscription_id + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "prefix", + "Value": locals.prefix + }, + { + "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", + "EnvVarName": "TF_VAR_resource_group_name", + "HCLName": "resource_group_name", + "Value": locals.resource_group_name + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address", + "Value": locals.ip_address + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address2", + "Value": locals.ip_address2 + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "synapse_administrators", + "Value": locals.synapse_administrators + }, + { + "CICDSecretName": "ARM_STORAGE_NAME", + "EnvVarName": "TF_VAR_state_storage_account_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SYNAPSE_PASSWORD", + "EnvVarName": "TF_VAR_synapse_sql_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_JUMPHOST_PASSWORD", + "EnvVarName": "TF_VAR_jumphost_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", + "EnvVarName": "TF_VAR_web_app_admin_security_group", + "HCLName": "", + "Value": "#####" + }, + /* + Git Integration Set-Up + */ + { + "CICDSecretName": "GIT_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_PAT", + "EnvVarName": "TF_VAR_synapse_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_USER_NAME", + "EnvVarName": "TF_VAR_synapse_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_EMAIL_ADDRESS", + "EnvVarName": 
"TF_VAR_synapse_git_email_address", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_PAT", + "EnvVarName": "TF_VAR_adf_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_USER_NAME", + "EnvVarName": "TF_VAR_adf_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", + "EnvVarName": "TF_VAR_adf_git_email_address", + "HCLName": "", + "Value": "#####" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc new file mode 100644 index 00000000..4ad56b08 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc @@ -0,0 +1,19 @@ +{ + "owner_tag": "Contoso", + "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "resource_location": "AustraliaEast", + "environment_tag": "stg", + "domain": "microsoft.com", + "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", + "prefix": "ads", + "resource_group_name": "gft2", + "ip_address": "144.138.148.220", + "ip_address2": "144.138.148.220", + "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", + "synapse_administrators": + {"Jorampon":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578"}, + "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. 
note that this will not be known until first deployment + "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet new file mode 100644 index 00000000..1ea0bd5a --- /dev/null +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -0,0 +1,252 @@ +function (featuretemplatename="full_deployment",environment="staging") +local locals = { + "staging": import './staging/common_vars_values.jsonc', + "production": import './production/common_vars_values.jsonc', + "local": import './local/common_vars_values.jsonc', + "admz": import './admz/common_vars_values.jsonc', +}; + +local featuretemplates = { + "basic_deployment" : import './../featuretemplates/basic_deployment.jsonc', + "full_deployment" : import './../featuretemplates/full_deployment.jsonc', + "functional_tests" : import './../featuretemplates/functional_tests.jsonc', +}; + +local featuretemplate = [ // Object comprehension. + { + ["CICDSecretName"]: "", + ["EnvVarName"]: "TF_VAR_" + sd.Name, + ["HCLName"]: "", + ["Value"]: sd.Value, + } + for sd in featuretemplates[featuretemplatename] + ]; + + +{ + "Variables": [ + /*Attributes: + CICDSecretName: Name of the Secret that will hold the value in CICD. This mapps to the Env section of the CICD yaml", + EnvVarName: Name to be used when creating local environment Variable if this is blank no local environment variable will be created + HCLName: Name to be used when common_vars.yaml which is injected into HCL file. If this is blank it will not be included in HCL + */ + /* + Environment Only Vars + - these are not used in local console based deployments.
Only when runnning in git hub + */ + { + "CICDSecretName": "WEB_APP_ADMIN_USER", + "EnvVarName": "WEB_APP_ADMIN_USER", + "HCLName": "", + "Value": locals[environment].WEB_APP_ADMIN_USER + }, + { + "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", + "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", + "HCLName": "", + "Value": locals[environment].ARM_SYNAPSE_WORKSPACE_NAME + }, + { + "CICDSecretName": "ARM_KEYVAULT_NAME", + "EnvVarName": "keyVaultName", + "HCLName": "", + "Value": locals[environment].ARM_KEYVAULT_NAME + }, + { + "CICDSecretName": "ARM_DATALAKE_NAME", + "EnvVarName": "datalakeName", + "HCLName": "", + "Value": locals[environment].ARM_DATALAKE_NAME + }, + /* + Required for Automated CICD Deployment + */ + { + "CICDSecretName": "ARM_CLIENT_ID", + "EnvVarName": "ARM_CLIENT_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_PAL_PARTNER_ID", + "EnvVarName": "ARM_PAL_PARTNER_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_CLIENT_SECRET", + "EnvVarName": "ARM_CLIENT_SECRET", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SUBSCRIPTION_ID", + "EnvVarName": "ARM_SUBSCRIPTION_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_TENANT_ID", + "EnvVarName": "ARM_TENANT_ID", + "HCLName": "tenant_id", + "Value": locals[environment].tenant_id + }, + + /* + HCL Common Vars & Terraform Customisations + */ + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "owner_tag", + "Value": locals[environment].owner_tag + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "deployment_principal_layers1and3", + "Value": locals[environment].deployment_principal_layers1and3 + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "resource_location", + "Value": locals[environment].resource_location + }, + { + "CICDSecretName": "ENVIRONMENT_TAG", + "EnvVarName": "TF_VAR_environment_tag", + "HCLName": "environment_tag", + "Value": locals[environment].environment_tag + 
}, + { + "CICDSecretName": "ARM_DOMAIN", + "EnvVarName": "TF_VAR_domain", + "HCLName": "domain", + "Value": locals[environment].domain + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "subscription_id", + "Value": locals[environment].subscription_id + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "prefix", + "Value": locals[environment].prefix + }, + { + "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", + "EnvVarName": "TF_VAR_resource_group_name", + "HCLName": "resource_group_name", + "Value": locals[environment].resource_group_name + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address", + "Value": locals[environment].ip_address + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address2", + "Value": locals[environment].ip_address2 + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "synapse_administrators", + "Value": locals[environment].synapse_administrators + }, + { + "CICDSecretName": "ARM_STORAGE_NAME", + "EnvVarName": "TF_VAR_state_storage_account_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SYNAPSE_PASSWORD", + "EnvVarName": "TF_VAR_synapse_sql_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_JUMPHOST_PASSWORD", + "EnvVarName": "TF_VAR_jumphost_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", + "EnvVarName": "TF_VAR_web_app_admin_security_group", + "HCLName": "", + "Value": "#####" + }, + /* + Git Integration Set-Up + */ + { + "CICDSecretName": "GIT_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_PAT", + "EnvVarName": "TF_VAR_synapse_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + 
"CICDSecretName": "GIT_USER_NAME", + "EnvVarName": "TF_VAR_synapse_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_EMAIL_ADDRESS", + "EnvVarName": "TF_VAR_synapse_git_email_address", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_PAT", + "EnvVarName": "TF_VAR_adf_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_USER_NAME", + "EnvVarName": "TF_VAR_adf_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", + "EnvVarName": "TF_VAR_adf_git_email_address", + "HCLName": "", + "Value": "#####" + } + ]+featuretemplate +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/local/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/local/common_vars_template.jsonnet new file mode 100644 index 00000000..2c5ee212 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/local/common_vars_template.jsonnet @@ -0,0 +1,230 @@ +function () +local locals = import './common_vars_values.jsonc'; + +{ + "Variables": [ + /*Attributes: + CICDSecretName: Name of the Secret that will hold the value in CICD. This mapps to the Env section of the CICD yaml", + EnvVarName: Name to be used when creating local environment Variable if this is blank no local environment variable will be created + HCLName: Name to be used when common_vars.yaml which is injected into HCL file. If this is blank it will not be included in HCL + */ + /* + Environment Only Vars + - these are not used in local console based deployments. 
Only when runnning in git hub + */ + { + "CICDSecretName": "WEB_APP_ADMIN_USER", + "EnvVarName": "WEB_APP_ADMIN_USER", + "HCLName": "", + "Value": locals.WEB_APP_ADMIN_USER + }, + { + "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", + "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", + "HCLName": "", + "Value": locals.ARM_SYNAPSE_WORKSPACE_NAME + }, + { + "CICDSecretName": "ARM_KEYVAULT_NAME", + "EnvVarName": "keyVaultName", + "HCLName": "", + "Value": locals.ARM_KEYVAULT_NAME + }, + { + "CICDSecretName": "ARM_DATALAKE_NAME", + "EnvVarName": "datalakeName", + "HCLName": "", + "Value": locals.ARM_DATALAKE_NAME + }, + /* + Required for Automated CICD Deployment + */ + { + "CICDSecretName": "ARM_CLIENT_ID", + "EnvVarName": "ARM_CLIENT_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_PAL_PARTNER_ID", + "EnvVarName": "ARM_PAL_PARTNER_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_CLIENT_SECRET", + "EnvVarName": "ARM_CLIENT_SECRET", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SUBSCRIPTION_ID", + "EnvVarName": "ARM_SUBSCRIPTION_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_TENANT_ID", + "EnvVarName": "ARM_TENANT_ID", + "HCLName": "tenant_id", + "Value": locals.tenant_id + }, + + /* + HCL Common Vars & Terraform Customisations + */ + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "owner_tag", + "Value": locals.owner_tag + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "deployment_principal_layers1and3", + "Value": locals.deployment_principal_layers1and3 + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "resource_location", + "Value": locals.resource_location + }, + { + "CICDSecretName": "ENVIRONMENT_TAG", + "EnvVarName": "TF_VAR_environment_tag", + "HCLName": "environment_tag", + "Value": locals.environment_tag + }, + { + "CICDSecretName": "ARM_DOMAIN", + "EnvVarName": "TF_VAR_domain", + "HCLName": "domain", + "Value": 
locals.domain + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "subscription_id", + "Value": locals.subscription_id + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "prefix", + "Value": locals.prefix + }, + { + "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", + "EnvVarName": "TF_VAR_resource_group_name", + "HCLName": "resource_group_name", + "Value": locals.resource_group_name + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address", + "Value": locals.ip_address + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address2", + "Value": locals.ip_address2 + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "synapse_administrators", + "Value": locals.synapse_administrators + }, + { + "CICDSecretName": "ARM_STORAGE_NAME", + "EnvVarName": "TF_VAR_state_storage_account_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SYNAPSE_PASSWORD", + "EnvVarName": "TF_VAR_synapse_sql_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_JUMPHOST_PASSWORD", + "EnvVarName": "TF_VAR_jumphost_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", + "EnvVarName": "TF_VAR_web_app_admin_security_group", + "HCLName": "", + "Value": "#####" + }, + /* + Git Integration Set-Up + */ + { + "CICDSecretName": "GIT_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_PAT", + "EnvVarName": "TF_VAR_synapse_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_USER_NAME", + "EnvVarName": "TF_VAR_synapse_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_EMAIL_ADDRESS", + "EnvVarName": 
"TF_VAR_synapse_git_email_address", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_PAT", + "EnvVarName": "TF_VAR_adf_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_USER_NAME", + "EnvVarName": "TF_VAR_adf_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", + "EnvVarName": "TF_VAR_adf_git_email_address", + "HCLName": "", + "Value": "#####" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc new file mode 100644 index 00000000..4ad56b08 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc @@ -0,0 +1,19 @@ +{ + "owner_tag": "Contoso", + "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "resource_location": "AustraliaEast", + "environment_tag": "stg", + "domain": "microsoft.com", + "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", + "prefix": "ads", + "resource_group_name": "gft2", + "ip_address": "144.138.148.220", + "ip_address2": "144.138.148.220", + "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", + "synapse_administrators": + {"Jorampon":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578"}, + "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. 
note that this will not be known until first deployment + "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/production/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/production/common_vars_template.jsonnet new file mode 100644 index 00000000..2c5ee212 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/production/common_vars_template.jsonnet @@ -0,0 +1,230 @@ +function () +local locals = import './common_vars_values.jsonc'; + +{ + "Variables": [ + /*Attributes: + CICDSecretName: Name of the Secret that will hold the value in CICD. This mapps to the Env section of the CICD yaml", + EnvVarName: Name to be used when creating local environment Variable if this is blank no local environment variable will be created + HCLName: Name to be used when common_vars.yaml which is injected into HCL file. If this is blank it will not be included in HCL + */ + /* + Environment Only Vars + - these are not used in local console based deployments. 
Only when runnning in git hub + */ + { + "CICDSecretName": "WEB_APP_ADMIN_USER", + "EnvVarName": "WEB_APP_ADMIN_USER", + "HCLName": "", + "Value": locals.WEB_APP_ADMIN_USER + }, + { + "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", + "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", + "HCLName": "", + "Value": locals.ARM_SYNAPSE_WORKSPACE_NAME + }, + { + "CICDSecretName": "ARM_KEYVAULT_NAME", + "EnvVarName": "keyVaultName", + "HCLName": "", + "Value": locals.ARM_KEYVAULT_NAME + }, + { + "CICDSecretName": "ARM_DATALAKE_NAME", + "EnvVarName": "datalakeName", + "HCLName": "", + "Value": locals.ARM_DATALAKE_NAME + }, + /* + Required for Automated CICD Deployment + */ + { + "CICDSecretName": "ARM_CLIENT_ID", + "EnvVarName": "ARM_CLIENT_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_PAL_PARTNER_ID", + "EnvVarName": "ARM_PAL_PARTNER_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_CLIENT_SECRET", + "EnvVarName": "ARM_CLIENT_SECRET", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SUBSCRIPTION_ID", + "EnvVarName": "ARM_SUBSCRIPTION_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_TENANT_ID", + "EnvVarName": "ARM_TENANT_ID", + "HCLName": "tenant_id", + "Value": locals.tenant_id + }, + + /* + HCL Common Vars & Terraform Customisations + */ + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "owner_tag", + "Value": locals.owner_tag + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "deployment_principal_layers1and3", + "Value": locals.deployment_principal_layers1and3 + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "resource_location", + "Value": locals.resource_location + }, + { + "CICDSecretName": "ENVIRONMENT_TAG", + "EnvVarName": "TF_VAR_environment_tag", + "HCLName": "environment_tag", + "Value": locals.environment_tag + }, + { + "CICDSecretName": "ARM_DOMAIN", + "EnvVarName": "TF_VAR_domain", + "HCLName": "domain", + "Value": 
locals.domain + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "subscription_id", + "Value": locals.subscription_id + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "prefix", + "Value": locals.prefix + }, + { + "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", + "EnvVarName": "TF_VAR_resource_group_name", + "HCLName": "resource_group_name", + "Value": locals.resource_group_name + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address", + "Value": locals.ip_address + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address2", + "Value": locals.ip_address2 + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "synapse_administrators", + "Value": locals.synapse_administrators + }, + { + "CICDSecretName": "ARM_STORAGE_NAME", + "EnvVarName": "TF_VAR_state_storage_account_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SYNAPSE_PASSWORD", + "EnvVarName": "TF_VAR_synapse_sql_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_JUMPHOST_PASSWORD", + "EnvVarName": "TF_VAR_jumphost_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", + "EnvVarName": "TF_VAR_web_app_admin_security_group", + "HCLName": "", + "Value": "#####" + }, + /* + Git Integration Set-Up + */ + { + "CICDSecretName": "GIT_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_PAT", + "EnvVarName": "TF_VAR_synapse_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_USER_NAME", + "EnvVarName": "TF_VAR_synapse_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_EMAIL_ADDRESS", + "EnvVarName": 
"TF_VAR_synapse_git_email_address", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_PAT", + "EnvVarName": "TF_VAR_adf_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_USER_NAME", + "EnvVarName": "TF_VAR_adf_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", + "EnvVarName": "TF_VAR_adf_git_email_address", + "HCLName": "", + "Value": "#####" + } + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/production/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/production/common_vars_values.jsonc new file mode 100644 index 00000000..4ad56b08 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/production/common_vars_values.jsonc @@ -0,0 +1,19 @@ +{ + "owner_tag": "Contoso", + "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "resource_location": "AustraliaEast", + "environment_tag": "stg", + "domain": "microsoft.com", + "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", + "prefix": "ads", + "resource_group_name": "gft2", + "ip_address": "144.138.148.220", + "ip_address2": "144.138.148.220", + "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", + "synapse_administrators": + {"Jorampon":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578"}, + "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. 
note that this will not be known until first deployment + "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env new file mode 100644 index 00000000..10ca45c9 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env @@ -0,0 +1,27 @@ +WEB_APP_ADMIN_USER=ccbdbba4-669c-48d6-86b8-75c9ab2ee578 +ARM_SYNAPSE_WORKSPACE_NAME=adsdevsynwadskhpv +ARM_KEYVAULT_NAME=ads-dev-kv-ads-khpv +ARM_DATALAKE_NAME=adsdevdlsadskhpvadsl +ARM_CLIENT_ID=##### +ARM_PAL_PARTNER_ID=##### +ARM_CLIENT_SECRET=##### +ARM_SUBSCRIPTION_ID=##### +ARM_TENANT_ID=72f988bf-86f1-41af-91ab-2d7cd011db47 +ENVIRONMENT_TAG=stg +ARM_DOMAIN=microsoft.com +ARM_RESOURCE_GROUP_NAME=gft2 +ARM_STORAGE_NAME=##### +ARM_SYNAPSE_PASSWORD=##### +ARM_JUMPHOST_PASSWORD=##### +WEB_APP_ADMIN_SECURITY_GROUP=##### +GIT_REPOSITORY_NAME=##### +GIT_SYNAPSE_REPOSITORY_BRANCH_NAME=##### +GIT_PAT=##### +GIT_USER_NAME=##### +GIT_EMAIL_ADDRESS=##### +GIT_ADF_REPOSITORY_NAME=##### +GIT_ADF_REPOSITORY_BRANCH_NAME=##### +GIT_ADF_PAT=##### +GIT_ADF_USER_NAME=##### +GIT_ADF_EMAIL_ADDRESS=##### + diff --git a/solution/DeploymentV2/environments/vars/staging/LoadCommonVars.ps1 b/solution/DeploymentV2/environments/vars/staging/LoadCommonVars.ps1 deleted file mode 100644 index 57ffd425..00000000 --- a/solution/DeploymentV2/environments/vars/staging/LoadCommonVars.ps1 +++ /dev/null @@ -1,14 +0,0 @@ - -#First Convert Terraform Commons to YAML -Install-Module powershell-yaml -Force -$obj = Get-Content ./common_vars.json | ConvertFrom-Json 
-$obj.Teraform | ConvertTo-YAML | Set-Content ./common_vars.yaml - -#Next Convert Environment to Environment Variables -$envars = $obj.Environment - -foreach($e in $envars) -{ - $Name = ($e | get-member)[-1].Name - [Environment]::SetEnvironmentVariable($Name, "$Value") -} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.json b/solution/DeploymentV2/environments/vars/staging/common_vars.json index c4eef8c7..ba775126 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars.json +++ b/solution/DeploymentV2/environments/vars/staging/common_vars.json @@ -1,23 +1,330 @@ { - "Teraform": - { - "owner_tag": "Contoso", - "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "resource_location": "AustraliaEast", - "environment_tag": "stg", - "domain": "microsoft.com", - "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", - "prefix": "ads", - "resource_group_name": "gft2", - "ip_address": "144.138.148.220", - "ip_address2": "144.138.148.220", - "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47" - }, - "Environment": - { - "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. 
note that this will not be known until first deployment - } + "Variables": [ + { + "CICDSecretName": "WEB_APP_ADMIN_USER", + "EnvVarName": "WEB_APP_ADMIN_USER", + "HCLName": "", + "Value": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + }, + { + "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", + "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", + "HCLName": "", + "Value": "adsdevsynwadskhpv" + }, + { + "CICDSecretName": "ARM_KEYVAULT_NAME", + "EnvVarName": "keyVaultName", + "HCLName": "", + "Value": "ads-dev-kv-ads-khpv" + }, + { + "CICDSecretName": "ARM_DATALAKE_NAME", + "EnvVarName": "datalakeName", + "HCLName": "", + "Value": "adsdevdlsadskhpvadsl" + }, + { + "CICDSecretName": "ARM_CLIENT_ID", + "EnvVarName": "ARM_CLIENT_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_PAL_PARTNER_ID", + "EnvVarName": "ARM_PAL_PARTNER_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_CLIENT_SECRET", + "EnvVarName": "ARM_CLIENT_SECRET", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SUBSCRIPTION_ID", + "EnvVarName": "ARM_SUBSCRIPTION_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_TENANT_ID", + "EnvVarName": "ARM_TENANT_ID", + "HCLName": "tenant_id", + "Value": "72f988bf-86f1-41af-91ab-2d7cd011db47" + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "owner_tag", + "Value": "Contoso" + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "deployment_principal_layers1and3", + "Value": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "resource_location", + "Value": "AustraliaEast" + }, + { + "CICDSecretName": "ENVIRONMENT_TAG", + "EnvVarName": "TF_VAR_environment_tag", + "HCLName": "environment_tag", + "Value": "stg" + }, + { + "CICDSecretName": "ARM_DOMAIN", + "EnvVarName": "TF_VAR_domain", + "HCLName": "domain", + "Value": "microsoft.com" + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": 
"subscription_id", + "Value": "035a1364-f00d-48e2-b582-4fe125905ee3" + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "prefix", + "Value": "ads" + }, + { + "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", + "EnvVarName": "TF_VAR_resource_group_name", + "HCLName": "resource_group_name", + "Value": "gft2" + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address", + "Value": "144.138.148.220" + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address2", + "Value": "144.138.148.220" + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "synapse_administrators", + "Value": { + "Jorampon": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + } + }, + { + "CICDSecretName": "ARM_STORAGE_NAME", + "EnvVarName": "TF_VAR_state_storage_account_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SYNAPSE_PASSWORD", + "EnvVarName": "TF_VAR_synapse_sql_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_JUMPHOST_PASSWORD", + "EnvVarName": "TF_VAR_jumphost_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", + "EnvVarName": "TF_VAR_web_app_admin_security_group", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_PAT", + "EnvVarName": "TF_VAR_synapse_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_USER_NAME", + "EnvVarName": "TF_VAR_synapse_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_EMAIL_ADDRESS", + "EnvVarName": "TF_VAR_synapse_git_email_address", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": 
"GIT_ADF_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_PAT", + "EnvVarName": "TF_VAR_adf_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_USER_NAME", + "EnvVarName": "TF_VAR_adf_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", + "EnvVarName": "TF_VAR_adf_git_email_address", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_deploy_web_app", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_deploy_function_app", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_deploy_custom_terraform", + "HCLName": "", + "Value": false + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_deploy_app_service_plan", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_deploy_data_factory", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_deploy_sentinel", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_deploy_purview", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_deploy_synapse", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_deploy_metadata_database", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_is_vnet_isolated", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_publish_web_app", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_publish_function_app", + "HCLName": "", + "Value": true + }, + { + 
"CICDSecretName": "", + "EnvVarName": "TF_VAR_publish_sample_files", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_publish_metadata_database", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_configure_networking", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_publish_datafactory_pipelines", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_publish_web_app_addcurrentuserasadmin", + "HCLName": "", + "Value": true + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_deploy_selfhostedsql", + "HCLName": "", + "Value": false + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_is_onprem_datafactory_ir_registered", + "HCLName": "", + "Value": false + }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_publish_sif_database", + "HCLName": "", + "Value": true + } + ] } diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.yaml b/solution/DeploymentV2/environments/vars/staging/common_vars.yaml index e92a26d6..9e3f574a 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars.yaml +++ b/solution/DeploymentV2/environments/vars/staging/common_vars.yaml @@ -1,12 +1,14 @@ +domain: microsoft.com owner_tag: Contoso +tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 +resource_group_name: gft2 +subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 +ip_address: 144.138.148.220 deployment_principal_layers1and3: ccbdbba4-669c-48d6-86b8-75c9ab2ee578 resource_location: AustraliaEast -environment_tag: stg -domain: microsoft.com -subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 -prefix: ads -resource_group_name: gft2 -ip_address: 144.138.148.220 ip_address2: 144.138.148.220 -tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 +prefix: ads +environment_tag: stg +synapse_administrators: + Jorampon: ccbdbba4-669c-48d6-86b8-75c9ab2ee578 diff --git 
a/solution/DeploymentV2/environments/vars/staging/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/staging/common_vars_template.jsonnet new file mode 100644 index 00000000..ea3ac10d --- /dev/null +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_template.jsonnet @@ -0,0 +1,246 @@ +function (featuretemplatenam="full_deployment") +local locals = import './common_vars_values.jsonc'; +local featuretemplates = { + "basic_deployment" : import './../../featuretemplates/full_deployment.jsonc', + "full_deployment" : import './../../featuretemplates/full_deployment.jsonc', + "functional_tests" : import './../../featuretemplates/full_deployment.jsonc', +}; + +local featuretemplate = [ // Object comprehension. + { + ["CICDSecretName"]: "", + ["EnvVarName"]: "TF_VAR_" + sd.Name, + ["HCLName"]: "", + ["Value"]: sd.Value, + } + for sd in featuretemplates[featuretemplatenam] + ]; + + +{ + "Variables": [ + /*Attributes: + CICDSecretName: Name of the Secret that will hold the value in CICD. This mapps to the Env section of the CICD yaml", + EnvVarName: Name to be used when creating local environment Variable if this is blank no local environment variable will be created + HCLName: Name to be used when common_vars.yaml which is injected into HCL file. If this is blank it will not be included in HCL + */ + /* + Environment Only Vars + - these are not used in local console based deployments. 
Only when runnning in git hub + */ + { + "CICDSecretName": "WEB_APP_ADMIN_USER", + "EnvVarName": "WEB_APP_ADMIN_USER", + "HCLName": "", + "Value": locals.WEB_APP_ADMIN_USER + }, + { + "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", + "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", + "HCLName": "", + "Value": locals.ARM_SYNAPSE_WORKSPACE_NAME + }, + { + "CICDSecretName": "ARM_KEYVAULT_NAME", + "EnvVarName": "keyVaultName", + "HCLName": "", + "Value": locals.ARM_KEYVAULT_NAME + }, + { + "CICDSecretName": "ARM_DATALAKE_NAME", + "EnvVarName": "datalakeName", + "HCLName": "", + "Value": locals.ARM_DATALAKE_NAME + }, + /* + Required for Automated CICD Deployment + */ + { + "CICDSecretName": "ARM_CLIENT_ID", + "EnvVarName": "ARM_CLIENT_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_PAL_PARTNER_ID", + "EnvVarName": "ARM_PAL_PARTNER_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_CLIENT_SECRET", + "EnvVarName": "ARM_CLIENT_SECRET", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SUBSCRIPTION_ID", + "EnvVarName": "ARM_SUBSCRIPTION_ID", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_TENANT_ID", + "EnvVarName": "ARM_TENANT_ID", + "HCLName": "tenant_id", + "Value": locals.tenant_id + }, + + /* + HCL Common Vars & Terraform Customisations + */ + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "owner_tag", + "Value": locals.owner_tag + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "deployment_principal_layers1and3", + "Value": locals.deployment_principal_layers1and3 + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "resource_location", + "Value": locals.resource_location + }, + { + "CICDSecretName": "ENVIRONMENT_TAG", + "EnvVarName": "TF_VAR_environment_tag", + "HCLName": "environment_tag", + "Value": locals.environment_tag + }, + { + "CICDSecretName": "ARM_DOMAIN", + "EnvVarName": "TF_VAR_domain", + "HCLName": "domain", + "Value": 
locals.domain + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "subscription_id", + "Value": locals.subscription_id + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "prefix", + "Value": locals.prefix + }, + { + "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", + "EnvVarName": "TF_VAR_resource_group_name", + "HCLName": "resource_group_name", + "Value": locals.resource_group_name + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address", + "Value": locals.ip_address + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "ip_address2", + "Value": locals.ip_address2 + }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "synapse_administrators", + "Value": locals.synapse_administrators + }, + { + "CICDSecretName": "ARM_STORAGE_NAME", + "EnvVarName": "TF_VAR_state_storage_account_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_SYNAPSE_PASSWORD", + "EnvVarName": "TF_VAR_synapse_sql_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "ARM_JUMPHOST_PASSWORD", + "EnvVarName": "TF_VAR_jumphost_password", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", + "EnvVarName": "TF_VAR_web_app_admin_security_group", + "HCLName": "", + "Value": "#####" + }, + /* + Git Integration Set-Up + */ + { + "CICDSecretName": "GIT_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_PAT", + "EnvVarName": "TF_VAR_synapse_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_USER_NAME", + "EnvVarName": "TF_VAR_synapse_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_EMAIL_ADDRESS", + "EnvVarName": 
"TF_VAR_synapse_git_email_address", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", + "EnvVarName": "TF_VAR_adf_git_repository_branch_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_PAT", + "EnvVarName": "TF_VAR_adf_git_pat", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_USER_NAME", + "EnvVarName": "TF_VAR_adf_git_user_name", + "HCLName": "", + "Value": "#####" + }, + { + "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", + "EnvVarName": "TF_VAR_adf_git_email_address", + "HCLName": "", + "Value": "#####" + } + ]+featuretemplate +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc new file mode 100644 index 00000000..4ad56b08 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -0,0 +1,19 @@ +{ + "owner_tag": "Contoso", + "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "resource_location": "AustraliaEast", + "environment_tag": "stg", + "domain": "microsoft.com", + "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", + "prefix": "ads", + "resource_group_name": "gft2", + "ip_address": "144.138.148.220", + "ip_address2": "144.138.148.220", + "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", + "synapse_administrators": + {"Jorampon":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578"}, + "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. 
note that this will not be known until first deployment + "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl index 44d68995..f7151652 100644 --- a/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform/vars/staging/terragrunt.hcl @@ -1,22 +1,3 @@ -generate "layer1.tf" { - path = "layer1.tf" - if_exists = "overwrite_terragrunt" - contents = < Date: Tue, 26 Jul 2022 18:50:59 +0800 Subject: [PATCH 044/151] Added notion of Environments to simplify transition from console deployment to agent deployment --- .github/workflows/continuous-delivery.yml | 5 +- solution/DeploymentV2/Deploy.ps1 | 8 +- solution/DeploymentV2/Deploy_0_Prep.ps1 | 27 +++-- solution/DeploymentV2/Deploy_1_Infra0.ps1 | 2 + solution/DeploymentV2/Prepare.ps1 | 9 +- .../featuretemplates/basic_deployment.jsonc | 44 ++++---- .../featuretemplates/full_deployment.jsonc | 100 ++++-------------- .../full_deployment_no_purview.jsonc | 100 ++++-------------- .../featuretemplates/functional_tests.jsonc | 100 ++++-------------- .../vars/PreprocessEnvironment.ps1 | 12 ++- 10 files changed, 122 insertions(+), 285 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index b2c2644e..8c03bbcc 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,7 +3,7 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: $default-branch + branches: feature-1.0.4 jobs: deploy-to-env-one: @@ -14,6 +14,7 @@ 
jobs: environmentName: staging gitDeploy : true skipTerraformDeployment: false + featureTemplate: basic_deployment WEB_APP_ADMIN_USER: ${{ secrets.WEB_APP_ADMIN_USER }} keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} ARM_SYNAPSE_WORKSPACE_NAME: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} @@ -110,7 +111,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 + ./Deploy.ps1 -RunTerraformLayer2 $true -FeatureTemplate $featureTemplate #PROD ENVIRONMENT diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 61c11378..55f15d19 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -26,7 +26,9 @@ param ( [Parameter(Mandatory=$false)] [bool]$RunTerraformLayer2=$false, [Parameter(Mandatory=$false)] - [bool]$RunTerraformLayer3=$false + [bool]$RunTerraformLayer3=$false, + [Parameter(Mandatory=$false)] + [string]$FeatureTemplate="full_deployment" ) #------------------------------------------------------------------------------------------------------------ # Preparation #Mandatory @@ -35,12 +37,12 @@ $deploymentFolderPath = (Get-Location).Path $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') -Invoke-Expression ./Deploy_0_Prep.ps1 +Invoke-Expression ./Deploy_0_Prep.ps1 -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate #------------------------------------------------------------------------------------------------------------ # Main Terraform #------------------------------------------------------------------------------------------------------------ -Invoke-Expression ./Deploy_1_Infra0.ps1 -RunTerraformLayer1 $RunTerraformLayer1 -RunTerraformLayer2 $RunTerraformLayer2 -RunTerraformLayer3 $RunTerraformLayer3 +Invoke-Expression ./Deploy_1_Infra0.ps1 
-RunTerraformLayer1 $RunTerraformLayer1 -RunTerraformLayer2 $RunTerraformLayer2 -RunTerraformLayer3 $RunTerraformLayer3 -skipTerraformDeployment $skipTerraformDeployment #------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/Deploy_0_Prep.ps1 b/solution/DeploymentV2/Deploy_0_Prep.ps1 index 5132741d..67b1a6d2 100644 --- a/solution/DeploymentV2/Deploy_0_Prep.ps1 +++ b/solution/DeploymentV2/Deploy_0_Prep.ps1 @@ -1,4 +1,11 @@ -$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +param ( + [Parameter(Mandatory=$true)] + [System.Boolean]$gitDeploy=$false, + [Parameter(Mandatory=$true)] + [String]$deploymentFolderPath, + [Parameter(Mandatory=$true)] + [String]$FeatureTemplate +) #Check for SQLServer Module $SqlInstalled = false @@ -16,12 +23,13 @@ if($null -eq $SqlInstalled) #needed for git integration az extension add --upgrade --name datafactory - #accept custom image terms #https://docs.microsoft.com/en-us/cli/azure/vm/image/terms?view=azure-cli-latest #az vm image terms accept --urn h2o-ai:h2o-driverles-ai:h2o-dai-lts:latest + + if ($gitDeploy) { $resourceGroupName = [System.Environment]::GetEnvironmentVariable('ARM_RESOURCE_GROUP_NAME') @@ -40,18 +48,14 @@ else [System.Environment]::SetEnvironmentVariable('environmentName', $environmentName) } - $env:TF_VAR_ip_address2 = (Invoke-WebRequest ifconfig.me/ip).Content - - #Re-process Environment Config Files. 
- Set-Location ./environments/vars/ - ./PreprocessEnvironment.ps1 -Environment $environmentName - Set-Location $deploymentFolderPath + $env:TF_VAR_ip_address2 = (Invoke-WebRequest ifconfig.me/ip).Content } + + $environmentName = [System.Environment]::GetEnvironmentVariable('environmentName') -$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') if ($environmentName -eq "Quit" -or [string]::IsNullOrEmpty($environmentName)) { write-host "environmentName is currently: $environmentName" @@ -60,6 +64,11 @@ if ($environmentName -eq "Quit" -or [string]::IsNullOrEmpty($environmentName)) } +#Re-process Environment Config Files. +Set-Location ./environments/vars/ +./PreprocessEnvironment.ps1 -Environment $environmentName -FeatureTemplate $FeatureTemplate -gitDeploy $gitDeploy +Set-Location $deploymentFolderPath + [System.Environment]::SetEnvironmentVariable('TFenvironmentName',$environmentName) diff --git a/solution/DeploymentV2/Deploy_1_Infra0.ps1 b/solution/DeploymentV2/Deploy_1_Infra0.ps1 index 7e62eb30..aedce7c9 100644 --- a/solution/DeploymentV2/Deploy_1_Infra0.ps1 +++ b/solution/DeploymentV2/Deploy_1_Infra0.ps1 @@ -1,4 +1,6 @@ param ( + [Parameter(Mandatory=$false)] + [System.Boolean]$skipTerraformDeployment=$true, [Parameter(Mandatory=$false)] [System.Boolean]$RunTerraformLayer1=$false, [Parameter(Mandatory=$false)] diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index 6536b3e3..c2d6707e 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -258,12 +258,9 @@ else } Set-Location ./environments/vars/ - ./PreprocessEnvironment.ps1 -Environment $environmentName -FeatureTemplate $templateName - Set-Location $deploymentFolderPath - - - - $environmentFileContents | Set-Content $environmentFileTarget + ./PreprocessEnvironment.ps1 -Environment $environmentName -FeatureTemplate $templateName -gitDeploy $gitDeploy + Set-Location $deploymentFolderPath 
+ } diff --git a/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc b/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc index eb52c8bb..7ca49bcc 100644 --- a/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc +++ b/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc @@ -1,22 +1,22 @@ -{ -"deploy_web_app":true, -"deploy_function_app":true, -"deploy_custom_terraform":false, -"deploy_app_service_plan":true, -"deploy_data_factory":true, -"deploy_sentinel":true, -"deploy_purview":true, -"deploy_synapse":true, -"deploy_metadata_database":true, -"is_vnet_isolated":true, -"publish_web_app":true, -"publish_function_app":true, -"publish_sample_files":true, -"publish_metadata_database":true, -"configure_networking":true, -"publish_datafactory_pipelines":true, -"publish_web_app_addcurrentuserasadmin":true, -"deploy_selfhostedsql":false, -"is_onprem_datafactory_ir_registered":false, -"publish_sif_database":true -} \ No newline at end of file +[ + {"Name":"deploy_web_app","Value":true}, + {"Name":"deploy_function_app","Value":true}, + {"Name":"deploy_custom_terraform","Value":false}, + {"Name":"deploy_app_service_plan","Value":true}, + {"Name":"deploy_data_factory","Value":true}, + {"Name":"deploy_sentinel","Value":true}, + {"Name":"deploy_purview","Value":false}, + {"Name":"deploy_synapse","Value":true}, + {"Name":"deploy_metadata_database","Value":true}, + {"Name":"is_vnet_isolated","Value":false}, + {"Name":"publish_web_app","Value":true}, + {"Name":"publish_function_app","Value":true}, + {"Name":"publish_sample_files","Value":true}, + {"Name":"publish_metadata_database","Value":true}, + {"Name":"configure_networking","Value":true}, + {"Name":"publish_datafactory_pipelines","Value":true}, + {"Name":"publish_web_app_addcurrentuserasadmin","Value":true}, + {"Name":"deploy_selfhostedsql","Value":false}, + {"Name":"is_onprem_datafactory_ir_registered","Value":false}, + 
{"Name":"publish_sif_database","Value":true} +] \ No newline at end of file diff --git a/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc b/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc index a10a63a9..4dfe5fc4 100644 --- a/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc +++ b/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc @@ -1,82 +1,22 @@ [ - { - "Name": "deploy_web_app", - "Value": true - }, - { - "Name": "deploy_function_app", - "Value": true - }, - { - "Name": "deploy_custom_terraform", - "Value": false - }, - { - "Name": "deploy_app_service_plan", - "Value": true - }, - { - "Name": "deploy_data_factory", - "Value": true - }, - { - "Name": "deploy_sentinel", - "Value": true - }, - { - "Name": "deploy_purview", - "Value": true - }, - { - "Name": "deploy_synapse", - "Value": true - }, - { - "Name": "deploy_metadata_database", - "Value": true - }, - { - "Name": "is_vnet_isolated", - "Value": true - }, - { - "Name": "publish_web_app", - "Value": true - }, - { - "Name": "publish_function_app", - "Value": true - }, - { - "Name": "publish_sample_files", - "Value": true - }, - { - "Name": "publish_metadata_database", - "Value": true - }, - { - "Name": "configure_networking", - "Value": true - }, - { - "Name": "publish_datafactory_pipelines", - "Value": true - }, - { - "Name": "publish_web_app_addcurrentuserasadmin", - "Value": true - }, - { - "Name": "deploy_selfhostedsql", - "Value": false - }, - { - "Name": "is_onprem_datafactory_ir_registered", - "Value": false - }, - { - "Name": "publish_sif_database", - "Value": true - } + {"Name":"deploy_web_app","Value":true}, + {"Name":"deploy_function_app","Value":true}, + {"Name":"deploy_custom_terraform","Value":false}, + {"Name":"deploy_app_service_plan","Value":true}, + {"Name":"deploy_data_factory","Value":true}, + {"Name":"deploy_sentinel","Value":true}, + {"Name":"deploy_purview","Value":true}, + 
{"Name":"deploy_synapse","Value":true}, + {"Name":"deploy_metadata_database","Value":true}, + {"Name":"is_vnet_isolated","Value":true}, + {"Name":"publish_web_app","Value":true}, + {"Name":"publish_function_app","Value":true}, + {"Name":"publish_sample_files","Value":true}, + {"Name":"publish_metadata_database","Value":true}, + {"Name":"configure_networking","Value":true}, + {"Name":"publish_datafactory_pipelines","Value":true}, + {"Name":"publish_web_app_addcurrentuserasadmin","Value":true}, + {"Name":"deploy_selfhostedsql","Value":false}, + {"Name":"is_onprem_datafactory_ir_registered","Value":false}, + {"Name":"publish_sif_database","Value":true} ] \ No newline at end of file diff --git a/solution/DeploymentV2/environments/featuretemplates/full_deployment_no_purview.jsonc b/solution/DeploymentV2/environments/featuretemplates/full_deployment_no_purview.jsonc index 50990b21..e0c785ba 100644 --- a/solution/DeploymentV2/environments/featuretemplates/full_deployment_no_purview.jsonc +++ b/solution/DeploymentV2/environments/featuretemplates/full_deployment_no_purview.jsonc @@ -1,82 +1,22 @@ [ - { - "Name": "deploy_web_app", - "Value": true - }, - { - "Name": "deploy_function_app", - "Value": true - }, - { - "Name": "deploy_custom_terraform", - "Value": false - }, - { - "Name": "deploy_app_service_plan", - "Value": true - }, - { - "Name": "deploy_data_factory", - "Value": true - }, - { - "Name": "deploy_sentinel", - "Value": true - }, - { - "Name": "deploy_purview", - "Value": false - }, - { - "Name": "deploy_synapse", - "Value": true - }, - { - "Name": "deploy_metadata_database", - "Value": true - }, - { - "Name": "is_vnet_isolated", - "Value": true - }, - { - "Name": "publish_web_app", - "Value": true - }, - { - "Name": "publish_function_app", - "Value": true - }, - { - "Name": "publish_sample_files", - "Value": true - }, - { - "Name": "publish_metadata_database", - "Value": true - }, - { - "Name": "configure_networking", - "Value": true - }, - { - "Name": 
"publish_datafactory_pipelines", - "Value": true - }, - { - "Name": "publish_web_app_addcurrentuserasadmin", - "Value": true - }, - { - "Name": "deploy_selfhostedsql", - "Value": false - }, - { - "Name": "is_onprem_datafactory_ir_registered", - "Value": false - }, - { - "Name": "publish_sif_database", - "Value": true - } + {"Name":"deploy_web_app","Value":true}, + {"Name":"deploy_function_app","Value":true}, + {"Name":"deploy_custom_terraform","Value":false}, + {"Name":"deploy_app_service_plan","Value":true}, + {"Name":"deploy_data_factory","Value":true}, + {"Name":"deploy_sentinel","Value":true}, + {"Name":"deploy_purview","Value":false}, + {"Name":"deploy_synapse","Value":true}, + {"Name":"deploy_metadata_database","Value":true}, + {"Name":"is_vnet_isolated","Value":true}, + {"Name":"publish_web_app","Value":true}, + {"Name":"publish_function_app","Value":true}, + {"Name":"publish_sample_files","Value":true}, + {"Name":"publish_metadata_database","Value":true}, + {"Name":"configure_networking","Value":true}, + {"Name":"publish_datafactory_pipelines","Value":true}, + {"Name":"publish_web_app_addcurrentuserasadmin","Value":true}, + {"Name":"deploy_selfhostedsql","Value":false}, + {"Name":"is_onprem_datafactory_ir_registered","Value":false}, + {"Name":"publish_sif_database","Value":true} ] \ No newline at end of file diff --git a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc index a10a63a9..a24a8970 100644 --- a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc +++ b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc @@ -1,82 +1,22 @@ [ - { - "Name": "deploy_web_app", - "Value": true - }, - { - "Name": "deploy_function_app", - "Value": true - }, - { - "Name": "deploy_custom_terraform", - "Value": false - }, - { - "Name": "deploy_app_service_plan", - "Value": true - }, - { - "Name": "deploy_data_factory", - 
"Value": true - }, - { - "Name": "deploy_sentinel", - "Value": true - }, - { - "Name": "deploy_purview", - "Value": true - }, - { - "Name": "deploy_synapse", - "Value": true - }, - { - "Name": "deploy_metadata_database", - "Value": true - }, - { - "Name": "is_vnet_isolated", - "Value": true - }, - { - "Name": "publish_web_app", - "Value": true - }, - { - "Name": "publish_function_app", - "Value": true - }, - { - "Name": "publish_sample_files", - "Value": true - }, - { - "Name": "publish_metadata_database", - "Value": true - }, - { - "Name": "configure_networking", - "Value": true - }, - { - "Name": "publish_datafactory_pipelines", - "Value": true - }, - { - "Name": "publish_web_app_addcurrentuserasadmin", - "Value": true - }, - { - "Name": "deploy_selfhostedsql", - "Value": false - }, - { - "Name": "is_onprem_datafactory_ir_registered", - "Value": false - }, - { - "Name": "publish_sif_database", - "Value": true - } + {"Name":"deploy_web_app","Value":true}, + {"Name":"deploy_function_app","Value":true}, + {"Name":"deploy_custom_terraform","Value":false}, + {"Name":"deploy_app_service_plan","Value":true}, + {"Name":"deploy_data_factory","Value":true}, + {"Name":"deploy_sentinel","Value":true}, + {"Name":"deploy_purview","Value":false}, + {"Name":"deploy_synapse","Value":true}, + {"Name":"deploy_metadata_database","Value":true}, + {"Name":"is_vnet_isolated","Value":true}, + {"Name":"publish_web_app","Value":true}, + {"Name":"publish_function_app","Value":true}, + {"Name":"publish_sample_files","Value":true}, + {"Name":"publish_metadata_database","Value":true}, + {"Name":"configure_networking","Value":true}, + {"Name":"publish_datafactory_pipelines","Value":true}, + {"Name":"publish_web_app_addcurrentuserasadmin","Value":true}, + {"Name":"deploy_selfhostedsql","Value":true}, + {"Name":"is_onprem_datafactory_ir_registered","Value":false}, + {"Name":"publish_sif_database","Value":true} ] \ No newline at end of file diff --git 
a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 6d71ea8a..1755b4a1 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -7,7 +7,9 @@ param ( [Parameter(Mandatory=$true)] [string]$Environment="staging", [Parameter(Mandatory=$true)] - [string]$FeatureTemplate="full_deployment" + [string]$FeatureTemplate="full_deployment", + [Parameter(Mandatory=$false)] + [bool]$gitDeploy=$false ) $Environment = $Environment.ToLower() @@ -51,5 +53,9 @@ foreach($t in $obj.Variables) #Write the Terraform Element common_vars.yaml - this is then injected into the hcl file $HCLYAML | ConvertTo-YAML | Set-Content ./$Environment/common_vars.yaml -#Write the Git Secrets to the Git Template .env -$GithubEnvTemplate|Set-Content ./$Environment/GetSecretsTemplate.env \ No newline at end of file + +if($gitDeploy -eq $false) +{ + #Write the Git Secrets to the Git Template .env + $GithubEnvTemplate|Set-Content ./$Environment/GetSecretsTemplate.env +} \ No newline at end of file From 744fd1d2fad3917615b3f60372ed161129080af4 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 26 Jul 2022 19:17:59 +0800 Subject: [PATCH 045/151] CICD Test --- .github/workflows/continuous-delivery.yml | 35 +++++++++++------------ 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 8c03bbcc..5a1d2f77 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -9,6 +9,8 @@ jobs: deploy-to-env-one: name: Deploy to Environment One concurrency: terraform + environment: + name: development env: # This determines the location of the .hcl file that will be used environmentName: staging @@ -48,11 +50,9 @@ jobs: TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} #TF_LOG : TRACE 
- environment: - name: development - runs-on: ubuntu-latest - steps: + runs-on: ubuntu-latest + steps: - name: PrintInfo run: | echo "Deploying to Resource Group: ${{ env.TF_VAR_resource_group_name }} " @@ -71,20 +71,6 @@ jobs: creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' enable-AzPSSession: true - - name: Set PAL - id: set_pal - continue-on-error: true - run: | - az extension add --name managementpartner - az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} - - - - name: Terragrunt Install - id: terragrunt_install - working-directory: ./solution/DeploymentV2/terraform - run: | - brew install terragrunt - - name: Open Firewalls for Agent id: open_firewalls continue-on-error: true @@ -94,6 +80,19 @@ jobs: az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.ARM_SYNAPSE_WORKSPACE_NAME }} az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} + - name: Set PAL + id: set_pal + continue-on-error: true + run: | + az extension add --name managementpartner + az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} + + - name: Terragrunt Install + id: terragrunt_install + working-directory: ./solution/DeploymentV2/terraform + run: | + brew install terragrunt + - name: Install Jsonnet id: jsonnet-install working-directory: ./solution/DeploymentV2/ From 7d026d85451dc559107f72557f7b22fb7a048aec Mon Sep 17 00:00:00 2001 From: John Rampono 
Date: Tue, 26 Jul 2022 21:23:20 +0800 Subject: [PATCH 046/151] Added notion of Environments to simplify transition from console deployment to agent deployment --- .github/workflows/continuous-delivery.yml | 7 +- solution/DeploymentV2/Deploy.ps1 | 4 +- .../featuretemplates/basic_deployment.jsonc | 2 +- .../vars/PreprocessEnvironment.ps1 | 55 ++- .../vars/admz/common_vars_template.jsonnet | 230 ---------- .../vars/common_vars_template.jsonnet | 33 +- .../vars/local/common_vars_template.jsonnet | 230 ---------- .../production/common_vars_template.jsonnet | 230 ---------- .../vars/staging/GetSecretsTemplate.env | 34 +- .../vars/staging/common_vars.json | 418 ++++-------------- .../vars/staging/common_vars.yaml | 14 - .../vars/staging/common_vars_for_hcl.json | 16 + .../vars/staging/common_vars_template.jsonnet | 246 ----------- .../vars/staging/common_vars_values.jsonc | 6 +- .../vars/local/terragrunt.hcl | 2 +- .../vars/production/terragrunt.hcl | 2 +- .../vars/staging/terragrunt.hcl | 2 +- .../vars/local/terragrunt.hcl | 2 +- .../vars/production/terragrunt.hcl | 2 +- .../vars/staging/terragrunt.hcl | 2 +- .../vars/local/terragrunt.hcl | 2 +- .../vars/production/terragrunt.hcl | 2 +- .../vars/staging/terragrunt.hcl | 2 +- 23 files changed, 196 insertions(+), 1347 deletions(-) delete mode 100644 solution/DeploymentV2/environments/vars/admz/common_vars_template.jsonnet delete mode 100644 solution/DeploymentV2/environments/vars/local/common_vars_template.jsonnet delete mode 100644 solution/DeploymentV2/environments/vars/production/common_vars_template.jsonnet delete mode 100644 solution/DeploymentV2/environments/vars/staging/common_vars.yaml create mode 100644 solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json delete mode 100644 solution/DeploymentV2/environments/vars/staging/common_vars_template.jsonnet diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 5a1d2f77..29640979 100644 --- 
a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,7 +3,7 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: feature-1.0.4 + branches: $default-branch jobs: deploy-to-env-one: @@ -118,6 +118,8 @@ jobs: name: Deploy to Environment Two concurrency: terraform needs: [deploy-to-env-one] + environment: + name: Prod env: environmentName: production gitDeploy : true @@ -156,8 +158,7 @@ jobs: #PROD ENVIRONMENT #TF_LOG : TRACE - environment: - name: Prod + runs-on: ubuntu-latest steps: diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 55f15d19..52599f3a 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -37,12 +37,12 @@ $deploymentFolderPath = (Get-Location).Path $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') -Invoke-Expression ./Deploy_0_Prep.ps1 -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate +./Deploy_0_Prep.ps1 -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate #------------------------------------------------------------------------------------------------------------ # Main Terraform #------------------------------------------------------------------------------------------------------------ -Invoke-Expression ./Deploy_1_Infra0.ps1 -RunTerraformLayer1 $RunTerraformLayer1 -RunTerraformLayer2 $RunTerraformLayer2 -RunTerraformLayer3 $RunTerraformLayer3 -skipTerraformDeployment $skipTerraformDeployment +./Deploy_1_Infra0.ps1 -RunTerraformLayer1 $RunTerraformLayer1 -RunTerraformLayer2 $RunTerraformLayer2 -RunTerraformLayer3 $RunTerraformLayer3 -skipTerraformDeployment $skipTerraformDeployment 
#------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc b/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc index 7ca49bcc..54477a6f 100644 --- a/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc +++ b/solution/DeploymentV2/environments/featuretemplates/basic_deployment.jsonc @@ -13,7 +13,7 @@ {"Name":"publish_function_app","Value":true}, {"Name":"publish_sample_files","Value":true}, {"Name":"publish_metadata_database","Value":true}, - {"Name":"configure_networking","Value":true}, + {"Name":"configure_networking","Value":false}, {"Name":"publish_datafactory_pipelines","Value":true}, {"Name":"publish_web_app_addcurrentuserasadmin","Value":true}, {"Name":"deploy_selfhostedsql","Value":false}, diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 1755b4a1..67069e8f 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -7,7 +7,7 @@ param ( [Parameter(Mandatory=$true)] [string]$Environment="staging", [Parameter(Mandatory=$true)] - [string]$FeatureTemplate="full_deployment", + [string]$FeatureTemplate="basic_deployment", [Parameter(Mandatory=$false)] [bool]$gitDeploy=$false ) @@ -15,44 +15,39 @@ param ( $Environment = $Environment.ToLower() #First Convert Terraform Commons to YAML -Install-Module powershell-yaml -Force +#Install-Module powershell-yaml -Force $GithubEnvTemplate = "" -Write-Host "Preparing Environment: $Environment" +Write-Host "Preparing Environment: $Environment Using $FeatureTemplate Template" (jsonnet "./common_vars_template.jsonnet" --tla-str featuretemplatename=$FeatureTemplate --tla-str environment=$Environment ) | Set-Content("./$Environment/common_vars.json") $obj 
= Get-Content ./$Environment/common_vars.json | ConvertFrom-Json -$HCLYaml = @{} -foreach($t in $obj.Variables) + +foreach($t in ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"})) { - $Value = $t.Value - if($t.EnvVarName -ne "") + $Name = $t.Name + $Value = $obj.ForEnvVar[0].$Name + if($Value.GetType().Name -eq "Boolean") { - $Name = $t.EnvVarName - if([string]::IsNullOrEmpty($Value) -eq $false -and $Value -ne '#####') - { - [Environment]::SetEnvironmentVariable($Name, "$Value") - } - } - - if($t.CICDSecretName -ne "") - { - $Name = $t.CICDSecretName - #Add to GitHubSecretFile - $GithubEnvTemplate = $GithubEnvTemplate + "$Name=$Value" + [System.Environment]::NewLine - } - - if($t.HCLName -ne "") - { - $Name = $t.HCLName - #Add to CommonVars.yaml - $HCLYAML.$Name = $Value - } - + $Value = $Value.ToString().ToLower() + } + + if([string]::IsNullOrEmpty($Value) -eq $false -and $Value -ne '#####') + { + [Environment]::SetEnvironmentVariable($Name, $Value) + } } -#Write the Terraform Element common_vars.yaml - this is then injected into the hcl file -$HCLYAML | ConvertTo-YAML | Set-Content ./$Environment/common_vars.yaml +foreach($t in ($obj.ForSecretFile | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"})) +{ + $Name = $t.Name + $Value = $obj.ForSecretFile[0].$Name + #Add to GitHubSecretFile + $GithubEnvTemplate = $GithubEnvTemplate + "$Name=$Value" + [System.Environment]::NewLine +} + +#Write the Terraform Element common_vars_for_hcl.json - this is then injected into the hcl file +($obj.ForHCL | ConvertTo-Json -Depth 10) | Set-Content ./$Environment/common_vars_for_hcl.json if($gitDeploy -eq $false) { diff --git a/solution/DeploymentV2/environments/vars/admz/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/admz/common_vars_template.jsonnet deleted file mode 100644 index 2c5ee212..00000000 --- a/solution/DeploymentV2/environments/vars/admz/common_vars_template.jsonnet +++ /dev/null @@ -1,230 +0,0 @@ 
-function () -local locals = import './common_vars_values.jsonc'; - -{ - "Variables": [ - /*Attributes: - CICDSecretName: Name of the Secret that will hold the value in CICD. This mapps to the Env section of the CICD yaml", - EnvVarName: Name to be used when creating local environment Variable if this is blank no local environment variable will be created - HCLName: Name to be used when common_vars.yaml which is injected into HCL file. If this is blank it will not be included in HCL - */ - /* - Environment Only Vars - - these are not used in local console based deployments. Only when runnning in git hub - */ - { - "CICDSecretName": "WEB_APP_ADMIN_USER", - "EnvVarName": "WEB_APP_ADMIN_USER", - "HCLName": "", - "Value": locals.WEB_APP_ADMIN_USER - }, - { - "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", - "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", - "HCLName": "", - "Value": locals.ARM_SYNAPSE_WORKSPACE_NAME - }, - { - "CICDSecretName": "ARM_KEYVAULT_NAME", - "EnvVarName": "keyVaultName", - "HCLName": "", - "Value": locals.ARM_KEYVAULT_NAME - }, - { - "CICDSecretName": "ARM_DATALAKE_NAME", - "EnvVarName": "datalakeName", - "HCLName": "", - "Value": locals.ARM_DATALAKE_NAME - }, - /* - Required for Automated CICD Deployment - */ - { - "CICDSecretName": "ARM_CLIENT_ID", - "EnvVarName": "ARM_CLIENT_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_PAL_PARTNER_ID", - "EnvVarName": "ARM_PAL_PARTNER_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_CLIENT_SECRET", - "EnvVarName": "ARM_CLIENT_SECRET", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_SUBSCRIPTION_ID", - "EnvVarName": "ARM_SUBSCRIPTION_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_TENANT_ID", - "EnvVarName": "ARM_TENANT_ID", - "HCLName": "tenant_id", - "Value": locals.tenant_id - }, - - /* - HCL Common Vars & Terraform Customisations - */ - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": 
"owner_tag", - "Value": locals.owner_tag - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "deployment_principal_layers1and3", - "Value": locals.deployment_principal_layers1and3 - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "resource_location", - "Value": locals.resource_location - }, - { - "CICDSecretName": "ENVIRONMENT_TAG", - "EnvVarName": "TF_VAR_environment_tag", - "HCLName": "environment_tag", - "Value": locals.environment_tag - }, - { - "CICDSecretName": "ARM_DOMAIN", - "EnvVarName": "TF_VAR_domain", - "HCLName": "domain", - "Value": locals.domain - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "subscription_id", - "Value": locals.subscription_id - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "prefix", - "Value": locals.prefix - }, - { - "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", - "EnvVarName": "TF_VAR_resource_group_name", - "HCLName": "resource_group_name", - "Value": locals.resource_group_name - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "ip_address", - "Value": locals.ip_address - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "ip_address2", - "Value": locals.ip_address2 - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "synapse_administrators", - "Value": locals.synapse_administrators - }, - { - "CICDSecretName": "ARM_STORAGE_NAME", - "EnvVarName": "TF_VAR_state_storage_account_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_SYNAPSE_PASSWORD", - "EnvVarName": "TF_VAR_synapse_sql_password", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_JUMPHOST_PASSWORD", - "EnvVarName": "TF_VAR_jumphost_password", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", - "EnvVarName": "TF_VAR_web_app_admin_security_group", - "HCLName": "", - "Value": "#####" - }, - /* - Git Integration Set-Up - */ - { - "CICDSecretName": "GIT_REPOSITORY_NAME", - 
"EnvVarName": "TF_VAR_adf_git_repository_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", - "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_PAT", - "EnvVarName": "TF_VAR_synapse_git_pat", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_USER_NAME", - "EnvVarName": "TF_VAR_synapse_git_user_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_EMAIL_ADDRESS", - "EnvVarName": "TF_VAR_synapse_git_email_address", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_branch_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_PAT", - "EnvVarName": "TF_VAR_adf_git_pat", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_USER_NAME", - "EnvVarName": "TF_VAR_adf_git_user_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", - "EnvVarName": "TF_VAR_adf_git_email_address", - "HCLName": "", - "Value": "#####" - } - ] -} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 1ea0bd5a..d01f7141 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -9,7 +9,7 @@ local locals = { local featuretemplates = { "basic_deployment" : import './../featuretemplates/basic_deployment.jsonc', "full_deployment" : import './../featuretemplates/full_deployment.jsonc', - "functional_tests" : import './../featuretemplates/full_deployment.jsonc', + "functional_tests" 
: import './../featuretemplates/functional_tests.jsonc', }; local featuretemplate = [ // Object comprehension. @@ -23,8 +23,7 @@ local featuretemplate = [ // Object comprehension. ]; -{ - "Variables": [ +local AllVariables = [ /*Attributes: CICDSecretName: Name of the Secret that will hold the value in CICD. This mapps to the Env section of the CICD yaml", EnvVarName: Name to be used when creating local environment Variable if this is blank no local environment variable will be created @@ -190,7 +189,7 @@ local featuretemplate = [ // Object comprehension. */ { "CICDSecretName": "GIT_REPOSITORY_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_name", + "EnvVarName": "TF_VAR_synapse_git_repository_name", "HCLName": "", "Value": "#####" }, @@ -248,5 +247,29 @@ local featuretemplate = [ // Object comprehension. "HCLName": "", "Value": "#####" } - ]+featuretemplate + ]+featuretemplate; + + + +local HCLVariables = { // Object comprehension. + [sd.HCLName]: sd.Value + for sd in AllVariables + if sd.HCLName != "" +}; + +local EnvironmentVariables = { // Object comprehension. + [sd.EnvVarName]: sd.Value + for sd in AllVariables + if sd.EnvVarName != "" +}; + +local SecretFileVars = { // Object comprehension. + [sd.CICDSecretName]: sd.Value + for sd in AllVariables + if sd.CICDSecretName != "" +}; +{ + "ForHCL": HCLVariables, + "ForEnvVar": EnvironmentVariables, + "ForSecretFile": SecretFileVars } \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/local/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/local/common_vars_template.jsonnet deleted file mode 100644 index 2c5ee212..00000000 --- a/solution/DeploymentV2/environments/vars/local/common_vars_template.jsonnet +++ /dev/null @@ -1,230 +0,0 @@ -function () -local locals = import './common_vars_values.jsonc'; - -{ - "Variables": [ - /*Attributes: - CICDSecretName: Name of the Secret that will hold the value in CICD. 
This mapps to the Env section of the CICD yaml", - EnvVarName: Name to be used when creating local environment Variable if this is blank no local environment variable will be created - HCLName: Name to be used when common_vars.yaml which is injected into HCL file. If this is blank it will not be included in HCL - */ - /* - Environment Only Vars - - these are not used in local console based deployments. Only when runnning in git hub - */ - { - "CICDSecretName": "WEB_APP_ADMIN_USER", - "EnvVarName": "WEB_APP_ADMIN_USER", - "HCLName": "", - "Value": locals.WEB_APP_ADMIN_USER - }, - { - "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", - "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", - "HCLName": "", - "Value": locals.ARM_SYNAPSE_WORKSPACE_NAME - }, - { - "CICDSecretName": "ARM_KEYVAULT_NAME", - "EnvVarName": "keyVaultName", - "HCLName": "", - "Value": locals.ARM_KEYVAULT_NAME - }, - { - "CICDSecretName": "ARM_DATALAKE_NAME", - "EnvVarName": "datalakeName", - "HCLName": "", - "Value": locals.ARM_DATALAKE_NAME - }, - /* - Required for Automated CICD Deployment - */ - { - "CICDSecretName": "ARM_CLIENT_ID", - "EnvVarName": "ARM_CLIENT_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_PAL_PARTNER_ID", - "EnvVarName": "ARM_PAL_PARTNER_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_CLIENT_SECRET", - "EnvVarName": "ARM_CLIENT_SECRET", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_SUBSCRIPTION_ID", - "EnvVarName": "ARM_SUBSCRIPTION_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_TENANT_ID", - "EnvVarName": "ARM_TENANT_ID", - "HCLName": "tenant_id", - "Value": locals.tenant_id - }, - - /* - HCL Common Vars & Terraform Customisations - */ - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "owner_tag", - "Value": locals.owner_tag - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "deployment_principal_layers1and3", - "Value": 
locals.deployment_principal_layers1and3 - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "resource_location", - "Value": locals.resource_location - }, - { - "CICDSecretName": "ENVIRONMENT_TAG", - "EnvVarName": "TF_VAR_environment_tag", - "HCLName": "environment_tag", - "Value": locals.environment_tag - }, - { - "CICDSecretName": "ARM_DOMAIN", - "EnvVarName": "TF_VAR_domain", - "HCLName": "domain", - "Value": locals.domain - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "subscription_id", - "Value": locals.subscription_id - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "prefix", - "Value": locals.prefix - }, - { - "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", - "EnvVarName": "TF_VAR_resource_group_name", - "HCLName": "resource_group_name", - "Value": locals.resource_group_name - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "ip_address", - "Value": locals.ip_address - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "ip_address2", - "Value": locals.ip_address2 - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "synapse_administrators", - "Value": locals.synapse_administrators - }, - { - "CICDSecretName": "ARM_STORAGE_NAME", - "EnvVarName": "TF_VAR_state_storage_account_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_SYNAPSE_PASSWORD", - "EnvVarName": "TF_VAR_synapse_sql_password", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_JUMPHOST_PASSWORD", - "EnvVarName": "TF_VAR_jumphost_password", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", - "EnvVarName": "TF_VAR_web_app_admin_security_group", - "HCLName": "", - "Value": "#####" - }, - /* - Git Integration Set-Up - */ - { - "CICDSecretName": "GIT_REPOSITORY_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", - 
"EnvVarName": "TF_VAR_synapse_git_repository_branch_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_PAT", - "EnvVarName": "TF_VAR_synapse_git_pat", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_USER_NAME", - "EnvVarName": "TF_VAR_synapse_git_user_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_EMAIL_ADDRESS", - "EnvVarName": "TF_VAR_synapse_git_email_address", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_branch_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_PAT", - "EnvVarName": "TF_VAR_adf_git_pat", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_USER_NAME", - "EnvVarName": "TF_VAR_adf_git_user_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", - "EnvVarName": "TF_VAR_adf_git_email_address", - "HCLName": "", - "Value": "#####" - } - ] -} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/production/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/production/common_vars_template.jsonnet deleted file mode 100644 index 2c5ee212..00000000 --- a/solution/DeploymentV2/environments/vars/production/common_vars_template.jsonnet +++ /dev/null @@ -1,230 +0,0 @@ -function () -local locals = import './common_vars_values.jsonc'; - -{ - "Variables": [ - /*Attributes: - CICDSecretName: Name of the Secret that will hold the value in CICD. This mapps to the Env section of the CICD yaml", - EnvVarName: Name to be used when creating local environment Variable if this is blank no local environment variable will be created - HCLName: Name to be used when common_vars.yaml which is injected into HCL file. 
If this is blank it will not be included in HCL - */ - /* - Environment Only Vars - - these are not used in local console based deployments. Only when runnning in git hub - */ - { - "CICDSecretName": "WEB_APP_ADMIN_USER", - "EnvVarName": "WEB_APP_ADMIN_USER", - "HCLName": "", - "Value": locals.WEB_APP_ADMIN_USER - }, - { - "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", - "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", - "HCLName": "", - "Value": locals.ARM_SYNAPSE_WORKSPACE_NAME - }, - { - "CICDSecretName": "ARM_KEYVAULT_NAME", - "EnvVarName": "keyVaultName", - "HCLName": "", - "Value": locals.ARM_KEYVAULT_NAME - }, - { - "CICDSecretName": "ARM_DATALAKE_NAME", - "EnvVarName": "datalakeName", - "HCLName": "", - "Value": locals.ARM_DATALAKE_NAME - }, - /* - Required for Automated CICD Deployment - */ - { - "CICDSecretName": "ARM_CLIENT_ID", - "EnvVarName": "ARM_CLIENT_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_PAL_PARTNER_ID", - "EnvVarName": "ARM_PAL_PARTNER_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_CLIENT_SECRET", - "EnvVarName": "ARM_CLIENT_SECRET", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_SUBSCRIPTION_ID", - "EnvVarName": "ARM_SUBSCRIPTION_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_TENANT_ID", - "EnvVarName": "ARM_TENANT_ID", - "HCLName": "tenant_id", - "Value": locals.tenant_id - }, - - /* - HCL Common Vars & Terraform Customisations - */ - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "owner_tag", - "Value": locals.owner_tag - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "deployment_principal_layers1and3", - "Value": locals.deployment_principal_layers1and3 - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "resource_location", - "Value": locals.resource_location - }, - { - "CICDSecretName": "ENVIRONMENT_TAG", - "EnvVarName": "TF_VAR_environment_tag", - "HCLName": "environment_tag", - "Value": 
locals.environment_tag - }, - { - "CICDSecretName": "ARM_DOMAIN", - "EnvVarName": "TF_VAR_domain", - "HCLName": "domain", - "Value": locals.domain - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "subscription_id", - "Value": locals.subscription_id - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "prefix", - "Value": locals.prefix - }, - { - "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", - "EnvVarName": "TF_VAR_resource_group_name", - "HCLName": "resource_group_name", - "Value": locals.resource_group_name - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "ip_address", - "Value": locals.ip_address - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "ip_address2", - "Value": locals.ip_address2 - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "synapse_administrators", - "Value": locals.synapse_administrators - }, - { - "CICDSecretName": "ARM_STORAGE_NAME", - "EnvVarName": "TF_VAR_state_storage_account_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_SYNAPSE_PASSWORD", - "EnvVarName": "TF_VAR_synapse_sql_password", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_JUMPHOST_PASSWORD", - "EnvVarName": "TF_VAR_jumphost_password", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", - "EnvVarName": "TF_VAR_web_app_admin_security_group", - "HCLName": "", - "Value": "#####" - }, - /* - Git Integration Set-Up - */ - { - "CICDSecretName": "GIT_REPOSITORY_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", - "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_PAT", - "EnvVarName": "TF_VAR_synapse_git_pat", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_USER_NAME", - "EnvVarName": 
"TF_VAR_synapse_git_user_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_EMAIL_ADDRESS", - "EnvVarName": "TF_VAR_synapse_git_email_address", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_branch_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_PAT", - "EnvVarName": "TF_VAR_adf_git_pat", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_USER_NAME", - "EnvVarName": "TF_VAR_adf_git_user_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", - "EnvVarName": "TF_VAR_adf_git_email_address", - "HCLName": "", - "Value": "#####" - } - ] -} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env index 10ca45c9..3613de7f 100644 --- a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env +++ b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env @@ -1,27 +1,27 @@ -WEB_APP_ADMIN_USER=ccbdbba4-669c-48d6-86b8-75c9ab2ee578 -ARM_SYNAPSE_WORKSPACE_NAME=adsdevsynwadskhpv -ARM_KEYVAULT_NAME=ads-dev-kv-ads-khpv -ARM_DATALAKE_NAME=adsdevdlsadskhpvadsl ARM_CLIENT_ID=##### -ARM_PAL_PARTNER_ID=##### ARM_CLIENT_SECRET=##### -ARM_SUBSCRIPTION_ID=##### -ARM_TENANT_ID=72f988bf-86f1-41af-91ab-2d7cd011db47 -ENVIRONMENT_TAG=stg +ARM_DATALAKE_NAME=adsstgdlsadskhpvadsl ARM_DOMAIN=microsoft.com +ARM_JUMPHOST_PASSWORD=##### +ARM_KEYVAULT_NAME=ads-stg-kv-ads-khpv +ARM_PAL_PARTNER_ID=##### ARM_RESOURCE_GROUP_NAME=gft2 ARM_STORAGE_NAME=##### +ARM_SUBSCRIPTION_ID=##### ARM_SYNAPSE_PASSWORD=##### -ARM_JUMPHOST_PASSWORD=##### -WEB_APP_ADMIN_SECURITY_GROUP=##### 
+ARM_SYNAPSE_WORKSPACE_NAME=adsstgsynwadskhpv +ARM_TENANT_ID=72f988bf-86f1-41af-91ab-2d7cd011db47 +ENVIRONMENT_TAG=stg +GIT_ADF_EMAIL_ADDRESS=##### +GIT_ADF_PAT=##### +GIT_ADF_REPOSITORY_BRANCH_NAME=##### +GIT_ADF_REPOSITORY_NAME=##### +GIT_ADF_USER_NAME=##### +GIT_EMAIL_ADDRESS=##### +GIT_PAT=##### GIT_REPOSITORY_NAME=##### GIT_SYNAPSE_REPOSITORY_BRANCH_NAME=##### -GIT_PAT=##### GIT_USER_NAME=##### -GIT_EMAIL_ADDRESS=##### -GIT_ADF_REPOSITORY_NAME=##### -GIT_ADF_REPOSITORY_BRANCH_NAME=##### -GIT_ADF_PAT=##### -GIT_ADF_USER_NAME=##### -GIT_ADF_EMAIL_ADDRESS=##### +WEB_APP_ADMIN_SECURITY_GROUP=##### +WEB_APP_ADMIN_USER=ccbdbba4-669c-48d6-86b8-75c9ab2ee578 diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.json b/solution/DeploymentV2/environments/vars/staging/common_vars.json index ba775126..324bd328 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars.json +++ b/solution/DeploymentV2/environments/vars/staging/common_vars.json @@ -1,330 +1,94 @@ { - "Variables": [ - { - "CICDSecretName": "WEB_APP_ADMIN_USER", - "EnvVarName": "WEB_APP_ADMIN_USER", - "HCLName": "", - "Value": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + "ForEnvVar": { + "ARM_CLIENT_ID": "#####", + "ARM_CLIENT_SECRET": "#####", + "ARM_PAL_PARTNER_ID": "#####", + "ARM_SUBSCRIPTION_ID": "#####", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadskhpv", + "ARM_TENANT_ID": "72f988bf-86f1-41af-91ab-2d7cd011db47", + "TF_VAR_adf_git_email_address": "#####", + "TF_VAR_adf_git_pat": "#####", + "TF_VAR_adf_git_repository_branch_name": "#####", + "TF_VAR_adf_git_repository_name": "#####", + "TF_VAR_adf_git_user_name": "#####", + "TF_VAR_configure_networking": false, + "TF_VAR_deploy_app_service_plan": true, + "TF_VAR_deploy_custom_terraform": false, + "TF_VAR_deploy_data_factory": true, + "TF_VAR_deploy_function_app": true, + "TF_VAR_deploy_metadata_database": true, + "TF_VAR_deploy_purview": false, + "TF_VAR_deploy_selfhostedsql": false, + "TF_VAR_deploy_sentinel": true, + 
"TF_VAR_deploy_synapse": true, + "TF_VAR_deploy_web_app": true, + "TF_VAR_domain": "microsoft.com", + "TF_VAR_environment_tag": "stg", + "TF_VAR_is_onprem_datafactory_ir_registered": false, + "TF_VAR_is_vnet_isolated": false, + "TF_VAR_jumphost_password": "#####", + "TF_VAR_publish_datafactory_pipelines": true, + "TF_VAR_publish_function_app": true, + "TF_VAR_publish_metadata_database": true, + "TF_VAR_publish_sample_files": true, + "TF_VAR_publish_sif_database": true, + "TF_VAR_publish_web_app": true, + "TF_VAR_publish_web_app_addcurrentuserasadmin": true, + "TF_VAR_resource_group_name": "gft2", + "TF_VAR_state_storage_account_name": "#####", + "TF_VAR_synapse_git_email_address": "#####", + "TF_VAR_synapse_git_pat": "#####", + "TF_VAR_synapse_git_repository_branch_name": "#####", + "TF_VAR_synapse_git_repository_name": "#####", + "TF_VAR_synapse_git_user_name": "#####", + "TF_VAR_synapse_sql_password": "#####", + "TF_VAR_web_app_admin_security_group": "#####", + "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "datalakeName": "adsstgdlsadskhpvadsl", + "keyVaultName": "ads-stg-kv-ads-khpv" + }, + "ForHCL": { + "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "domain": "microsoft.com", + "environment_tag": "stg", + "ip_address": "144.138.148.220", + "ip_address2": "144.138.148.220", + "owner_tag": "Contoso", + "prefix": "ads", + "resource_group_name": "gft2", + "resource_location": "AustraliaEast", + "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", + "synapse_administrators": { + "Jorampon": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, - { - "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", - "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", - "HCLName": "", - "Value": "adsdevsynwadskhpv" - }, - { - "CICDSecretName": "ARM_KEYVAULT_NAME", - "EnvVarName": "keyVaultName", - "HCLName": "", - "Value": "ads-dev-kv-ads-khpv" - }, - { - "CICDSecretName": "ARM_DATALAKE_NAME", - "EnvVarName": "datalakeName", - "HCLName": "", - 
"Value": "adsdevdlsadskhpvadsl" - }, - { - "CICDSecretName": "ARM_CLIENT_ID", - "EnvVarName": "ARM_CLIENT_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_PAL_PARTNER_ID", - "EnvVarName": "ARM_PAL_PARTNER_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_CLIENT_SECRET", - "EnvVarName": "ARM_CLIENT_SECRET", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_SUBSCRIPTION_ID", - "EnvVarName": "ARM_SUBSCRIPTION_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_TENANT_ID", - "EnvVarName": "ARM_TENANT_ID", - "HCLName": "tenant_id", - "Value": "72f988bf-86f1-41af-91ab-2d7cd011db47" - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "owner_tag", - "Value": "Contoso" - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "deployment_principal_layers1and3", - "Value": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "resource_location", - "Value": "AustraliaEast" - }, - { - "CICDSecretName": "ENVIRONMENT_TAG", - "EnvVarName": "TF_VAR_environment_tag", - "HCLName": "environment_tag", - "Value": "stg" - }, - { - "CICDSecretName": "ARM_DOMAIN", - "EnvVarName": "TF_VAR_domain", - "HCLName": "domain", - "Value": "microsoft.com" - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "subscription_id", - "Value": "035a1364-f00d-48e2-b582-4fe125905ee3" - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "prefix", - "Value": "ads" - }, - { - "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", - "EnvVarName": "TF_VAR_resource_group_name", - "HCLName": "resource_group_name", - "Value": "gft2" - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "ip_address", - "Value": "144.138.148.220" - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "ip_address2", - "Value": "144.138.148.220" - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": 
"synapse_administrators", - "Value": { - "Jorampon": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" - } - }, - { - "CICDSecretName": "ARM_STORAGE_NAME", - "EnvVarName": "TF_VAR_state_storage_account_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_SYNAPSE_PASSWORD", - "EnvVarName": "TF_VAR_synapse_sql_password", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_JUMPHOST_PASSWORD", - "EnvVarName": "TF_VAR_jumphost_password", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", - "EnvVarName": "TF_VAR_web_app_admin_security_group", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_REPOSITORY_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", - "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_PAT", - "EnvVarName": "TF_VAR_synapse_git_pat", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_USER_NAME", - "EnvVarName": "TF_VAR_synapse_git_user_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_EMAIL_ADDRESS", - "EnvVarName": "TF_VAR_synapse_git_email_address", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_branch_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_PAT", - "EnvVarName": "TF_VAR_adf_git_pat", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_USER_NAME", - "EnvVarName": "TF_VAR_adf_git_user_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", - "EnvVarName": "TF_VAR_adf_git_email_address", - 
"HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_deploy_web_app", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_deploy_function_app", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_deploy_custom_terraform", - "HCLName": "", - "Value": false - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_deploy_app_service_plan", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_deploy_data_factory", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_deploy_sentinel", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_deploy_purview", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_deploy_synapse", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_deploy_metadata_database", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_is_vnet_isolated", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_publish_web_app", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_publish_function_app", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_publish_sample_files", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_publish_metadata_database", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_configure_networking", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_publish_datafactory_pipelines", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_publish_web_app_addcurrentuserasadmin", - "HCLName": "", - "Value": true - }, - { - "CICDSecretName": "", - 
"EnvVarName": "TF_VAR_deploy_selfhostedsql", - "HCLName": "", - "Value": false - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_is_onprem_datafactory_ir_registered", - "HCLName": "", - "Value": false - }, - { - "CICDSecretName": "", - "EnvVarName": "TF_VAR_publish_sif_database", - "HCLName": "", - "Value": true - } - ] + "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47" + }, + "ForSecretFile": { + "ARM_CLIENT_ID": "#####", + "ARM_CLIENT_SECRET": "#####", + "ARM_DATALAKE_NAME": "adsstgdlsadskhpvadsl", + "ARM_DOMAIN": "microsoft.com", + "ARM_JUMPHOST_PASSWORD": "#####", + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-khpv", + "ARM_PAL_PARTNER_ID": "#####", + "ARM_RESOURCE_GROUP_NAME": "gft2", + "ARM_STORAGE_NAME": "#####", + "ARM_SUBSCRIPTION_ID": "#####", + "ARM_SYNAPSE_PASSWORD": "#####", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadskhpv", + "ARM_TENANT_ID": "72f988bf-86f1-41af-91ab-2d7cd011db47", + "ENVIRONMENT_TAG": "stg", + "GIT_ADF_EMAIL_ADDRESS": "#####", + "GIT_ADF_PAT": "#####", + "GIT_ADF_REPOSITORY_BRANCH_NAME": "#####", + "GIT_ADF_REPOSITORY_NAME": "#####", + "GIT_ADF_USER_NAME": "#####", + "GIT_EMAIL_ADDRESS": "#####", + "GIT_PAT": "#####", + "GIT_REPOSITORY_NAME": "#####", + "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "#####", + "GIT_USER_NAME": "#####", + "WEB_APP_ADMIN_SECURITY_GROUP": "#####", + "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + } } diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.yaml b/solution/DeploymentV2/environments/vars/staging/common_vars.yaml deleted file mode 100644 index 9e3f574a..00000000 --- a/solution/DeploymentV2/environments/vars/staging/common_vars.yaml +++ /dev/null @@ -1,14 +0,0 @@ -domain: microsoft.com -owner_tag: Contoso -tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 -resource_group_name: gft2 -subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 -ip_address: 144.138.148.220 -deployment_principal_layers1and3: ccbdbba4-669c-48d6-86b8-75c9ab2ee578 -resource_location: 
AustraliaEast -ip_address2: 144.138.148.220 -prefix: ads -environment_tag: stg -synapse_administrators: - Jorampon: ccbdbba4-669c-48d6-86b8-75c9ab2ee578 - diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json b/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json new file mode 100644 index 00000000..5d9310c0 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json @@ -0,0 +1,16 @@ +{ + "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "domain": "microsoft.com", + "environment_tag": "stg", + "ip_address": "144.138.148.220", + "ip_address2": "144.138.148.220", + "owner_tag": "Contoso", + "prefix": "ads", + "resource_group_name": "gft2", + "resource_location": "AustraliaEast", + "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", + "synapse_administrators": { + "Jorampon": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + }, + "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47" +} diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/staging/common_vars_template.jsonnet deleted file mode 100644 index ea3ac10d..00000000 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_template.jsonnet +++ /dev/null @@ -1,246 +0,0 @@ -function (featuretemplatenam="full_deployment") -local locals = import './common_vars_values.jsonc'; -local featuretemplates = { - "basic_deployment" : import './../../featuretemplates/full_deployment.jsonc', - "full_deployment" : import './../../featuretemplates/full_deployment.jsonc', - "functional_tests" : import './../../featuretemplates/full_deployment.jsonc', -}; - -local featuretemplate = [ // Object comprehension. 
- { - ["CICDSecretName"]: "", - ["EnvVarName"]: "TF_VAR_" + sd.Name, - ["HCLName"]: "", - ["Value"]: sd.Value, - } - for sd in featuretemplates[featuretemplatenam] - ]; - - -{ - "Variables": [ - /*Attributes: - CICDSecretName: Name of the Secret that will hold the value in CICD. This mapps to the Env section of the CICD yaml", - EnvVarName: Name to be used when creating local environment Variable if this is blank no local environment variable will be created - HCLName: Name to be used when common_vars.yaml which is injected into HCL file. If this is blank it will not be included in HCL - */ - /* - Environment Only Vars - - these are not used in local console based deployments. Only when runnning in git hub - */ - { - "CICDSecretName": "WEB_APP_ADMIN_USER", - "EnvVarName": "WEB_APP_ADMIN_USER", - "HCLName": "", - "Value": locals.WEB_APP_ADMIN_USER - }, - { - "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", - "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", - "HCLName": "", - "Value": locals.ARM_SYNAPSE_WORKSPACE_NAME - }, - { - "CICDSecretName": "ARM_KEYVAULT_NAME", - "EnvVarName": "keyVaultName", - "HCLName": "", - "Value": locals.ARM_KEYVAULT_NAME - }, - { - "CICDSecretName": "ARM_DATALAKE_NAME", - "EnvVarName": "datalakeName", - "HCLName": "", - "Value": locals.ARM_DATALAKE_NAME - }, - /* - Required for Automated CICD Deployment - */ - { - "CICDSecretName": "ARM_CLIENT_ID", - "EnvVarName": "ARM_CLIENT_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_PAL_PARTNER_ID", - "EnvVarName": "ARM_PAL_PARTNER_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_CLIENT_SECRET", - "EnvVarName": "ARM_CLIENT_SECRET", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_SUBSCRIPTION_ID", - "EnvVarName": "ARM_SUBSCRIPTION_ID", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_TENANT_ID", - "EnvVarName": "ARM_TENANT_ID", - "HCLName": "tenant_id", - "Value": locals.tenant_id - }, - - /* - HCL Common Vars 
& Terraform Customisations - */ - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "owner_tag", - "Value": locals.owner_tag - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "deployment_principal_layers1and3", - "Value": locals.deployment_principal_layers1and3 - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "resource_location", - "Value": locals.resource_location - }, - { - "CICDSecretName": "ENVIRONMENT_TAG", - "EnvVarName": "TF_VAR_environment_tag", - "HCLName": "environment_tag", - "Value": locals.environment_tag - }, - { - "CICDSecretName": "ARM_DOMAIN", - "EnvVarName": "TF_VAR_domain", - "HCLName": "domain", - "Value": locals.domain - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "subscription_id", - "Value": locals.subscription_id - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "prefix", - "Value": locals.prefix - }, - { - "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", - "EnvVarName": "TF_VAR_resource_group_name", - "HCLName": "resource_group_name", - "Value": locals.resource_group_name - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "ip_address", - "Value": locals.ip_address - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "ip_address2", - "Value": locals.ip_address2 - }, - { - "CICDSecretName": "", - "EnvVarName": "", - "HCLName": "synapse_administrators", - "Value": locals.synapse_administrators - }, - { - "CICDSecretName": "ARM_STORAGE_NAME", - "EnvVarName": "TF_VAR_state_storage_account_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_SYNAPSE_PASSWORD", - "EnvVarName": "TF_VAR_synapse_sql_password", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "ARM_JUMPHOST_PASSWORD", - "EnvVarName": "TF_VAR_jumphost_password", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", - "EnvVarName": "TF_VAR_web_app_admin_security_group", - "HCLName": "", - "Value": "#####" 
- }, - /* - Git Integration Set-Up - */ - { - "CICDSecretName": "GIT_REPOSITORY_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", - "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_PAT", - "EnvVarName": "TF_VAR_synapse_git_pat", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_USER_NAME", - "EnvVarName": "TF_VAR_synapse_git_user_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_EMAIL_ADDRESS", - "EnvVarName": "TF_VAR_synapse_git_email_address", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", - "EnvVarName": "TF_VAR_adf_git_repository_branch_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_PAT", - "EnvVarName": "TF_VAR_adf_git_pat", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_USER_NAME", - "EnvVarName": "TF_VAR_adf_git_user_name", - "HCLName": "", - "Value": "#####" - }, - { - "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", - "EnvVarName": "TF_VAR_adf_git_email_address", - "HCLName": "", - "Value": "#####" - } - ]+featuretemplate -} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 4ad56b08..3108640a 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -13,7 +13,7 @@ "synapse_administrators": {"Jorampon":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578"}, "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - 
"ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_DATALAKE_NAME": "adsstgdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. 
note that this will not be known until first deployment } \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl index 2825ab48..d132c24e 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = yamldecode(file("../../../environments/vars/local/common_vars.yaml")) + common_vars = jsondecode(file("../../../environments/vars/local/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl index 6c5f1bae..c0f91719 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = yamldecode(file("../../../environments/vars/production/common_vars.yaml")) + common_vars = jsondecode(file("../../../environments/vars/production/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl index 010d5f2a..6919c425 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = yamldecode(file("../../../environments/vars/staging/common_vars.yaml")) + common_vars = jsondecode(file("../../../environments/vars/staging/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl index 4d3c1a00..2e3a2bec 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl +++ 
b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = yamldecode(file("../../../environments/vars/local/common_vars.yaml")) + common_vars = jsondecode(file("../../../environments/vars/local/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl index b55d43c2..39a7f3fa 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = yamldecode(file("../../../environments/vars/production/common_vars.yaml")) + common_vars = jsondecode(file("../../../environments/vars/production/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl index 3a2624e9..92c7093e 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = yamldecode(file("../../../environments/vars/staging/common_vars.yaml")) + common_vars = jsondecode(file("../../../environments/vars/staging/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer3/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer3/vars/local/terragrunt.hcl index 39147ca9..db88118a 100644 --- a/solution/DeploymentV2/terraform_layer3/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer3/vars/local/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = yamldecode(file("../../../environments/vars/local/common_vars.yaml")) + common_vars = jsondecode(file("../../../environments/vars/local/common_vars_for_hcl.json")) } generate "layer2.tf" { diff --git a/solution/DeploymentV2/terraform_layer3/vars/production/terragrunt.hcl 
b/solution/DeploymentV2/terraform_layer3/vars/production/terragrunt.hcl index d2f4238b..c5b5da40 100644 --- a/solution/DeploymentV2/terraform_layer3/vars/production/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer3/vars/production/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = yamldecode(file("../../../environments/vars/production/common_vars.yaml")) + common_vars = jsondecode(file("../../../environments/vars/production/common_vars_for_hcl.json")) } generate "layer2.tf" { diff --git a/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl index 47b959c4..234d6bcf 100644 --- a/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = yamldecode(file("../../../environments/vars/staging/common_vars.yaml")) + common_vars = jsondecode(file("../../../environments/vars/staging/common_vars_for_hcl.json")) } generate "layer2.tf" { From 0a91e8460e07af1a7bc0f78cbe1801006c6cb7a1 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Wed, 27 Jul 2022 03:07:25 +0800 Subject: [PATCH 047/151] modified: solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl --- .../DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl index 6919c425..e4028b4a 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../environments/vars/staging/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../environments/vars/staging/common_vars_for_hcl.json")) } From 53972aab360fac5aaa79c9cb57c75290b8071bc9 
Mon Sep 17 00:00:00 2001 From: John Rampono Date: Wed, 27 Jul 2022 03:08:22 +0800 Subject: [PATCH 048/151] modified: .github/workflows/continuous-delivery.yml --- .github/workflows/continuous-delivery.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 29640979..13342ce1 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,7 +3,7 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: $default-branch + branches: feature-1.0.4 jobs: deploy-to-env-one: From 95e141ff52ff6a1eadc69c13763c3e3ff2650d31 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Wed, 27 Jul 2022 03:15:31 +0800 Subject: [PATCH 049/151] CICD Testing --- .github/workflows/continuous-delivery.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 13342ce1..68eb16db 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -110,7 +110,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 -RunTerraformLayer2 $true -FeatureTemplate $featureTemplate + ./Deploy.ps1 -RunTerraformLayer2 $true -FeatureTemplate ${{ env.featureTemplate}} #PROD ENVIRONMENT From 763466c1680ef19a1e0f5f844a0d1b0ed3e0b708 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Wed, 27 Jul 2022 03:47:24 +0800 Subject: [PATCH 050/151] CICD Fixed IP Addres2 Issue --- .github/workflows/continuous-delivery.yml | 1 + .../environments/vars/common_vars_template.jsonnet | 8 ++++---- .../environments/vars/staging/GetSecretsTemplate.env | 2 ++ .../environments/vars/staging/common_vars.json | 4 ++++ solution/DeploymentV2/terraform_layer2/vars.tf | 2 ++ 5 files changed, 13 insertions(+), 4 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml 
b/.github/workflows/continuous-delivery.yml index 68eb16db..f339b538 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -28,6 +28,7 @@ jobs: ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} # Customizing Terraform vars + TF_VAR_ip_address2 : ${{ secrets.ARM_IP_ADDRESS2 }} TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index d01f7141..d1b7ba18 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -143,14 +143,14 @@ local AllVariables = [ "Value": locals[environment].resource_group_name }, { - "CICDSecretName": "", - "EnvVarName": "", + "CICDSecretName": "ARM_IP_ADDRESS", + "EnvVarName": "TF_VAR_ip_address", "HCLName": "ip_address", "Value": locals[environment].ip_address }, { - "CICDSecretName": "", - "EnvVarName": "", + "CICDSecretName": "ARM_IP_ADDRESS2", + "EnvVarName": "TF_VAR_ip_address2", "HCLName": "ip_address2", "Value": locals[environment].ip_address2 }, diff --git a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env index 3613de7f..67a4f2a7 100644 --- a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env +++ b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env @@ -2,6 +2,8 @@ ARM_CLIENT_ID=##### ARM_CLIENT_SECRET=##### ARM_DATALAKE_NAME=adsstgdlsadskhpvadsl ARM_DOMAIN=microsoft.com +ARM_IP_ADDRESS=144.138.148.220 +ARM_IP_ADDRESS2=144.138.148.220 ARM_JUMPHOST_PASSWORD=##### ARM_KEYVAULT_NAME=ads-stg-kv-ads-khpv ARM_PAL_PARTNER_ID=##### diff --git 
a/solution/DeploymentV2/environments/vars/staging/common_vars.json b/solution/DeploymentV2/environments/vars/staging/common_vars.json index 324bd328..7e3f1deb 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars.json +++ b/solution/DeploymentV2/environments/vars/staging/common_vars.json @@ -24,6 +24,8 @@ "TF_VAR_deploy_web_app": true, "TF_VAR_domain": "microsoft.com", "TF_VAR_environment_tag": "stg", + "TF_VAR_ip_address": "144.138.148.220", + "TF_VAR_ip_address2": "144.138.148.220", "TF_VAR_is_onprem_datafactory_ir_registered": false, "TF_VAR_is_vnet_isolated": false, "TF_VAR_jumphost_password": "#####", @@ -68,6 +70,8 @@ "ARM_CLIENT_SECRET": "#####", "ARM_DATALAKE_NAME": "adsstgdlsadskhpvadsl", "ARM_DOMAIN": "microsoft.com", + "ARM_IP_ADDRESS": "144.138.148.220", + "ARM_IP_ADDRESS2": "144.138.148.220", "ARM_JUMPHOST_PASSWORD": "#####", "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-khpv", "ARM_PAL_PARTNER_ID": "#####", diff --git a/solution/DeploymentV2/terraform_layer2/vars.tf b/solution/DeploymentV2/terraform_layer2/vars.tf index 9c85028d..a739158c 100644 --- a/solution/DeploymentV2/terraform_layer2/vars.tf +++ b/solution/DeploymentV2/terraform_layer2/vars.tf @@ -4,11 +4,13 @@ variable "ip_address" { description = "The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets" type = string + default = "" } variable "ip_address2" { description = "The CICD ipaddress. 
We add an IP whitelisting to allow the setting of keyvault secrets" type = string + default = "" } variable "tenant_id" { From 89fe7c9a7b363620489a669a4321242cc16a5334 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Wed, 27 Jul 2022 03:54:28 +0800 Subject: [PATCH 051/151] Added FeatureTemplate to Secrets --- .github/workflows/continuous-delivery.yml | 2 +- .../environments/vars/common_vars_template.jsonnet | 6 ++++++ .../environments/vars/staging/GetSecretsTemplate.env | 1 + .../DeploymentV2/environments/vars/staging/common_vars.json | 1 + 4 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index f339b538..61e52fdb 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -16,7 +16,7 @@ jobs: environmentName: staging gitDeploy : true skipTerraformDeployment: false - featureTemplate: basic_deployment + featureTemplate: ${{ secrets.ARM_FEATURE_TEMPLATE }} WEB_APP_ADMIN_USER: ${{ secrets.WEB_APP_ADMIN_USER }} keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} ARM_SYNAPSE_WORKSPACE_NAME: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index d1b7ba18..11146790 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -160,6 +160,12 @@ local AllVariables = [ "HCLName": "synapse_administrators", "Value": locals[environment].synapse_administrators }, + { + "CICDSecretName": "ARM_FEATURE_TEMPLATE", + "EnvVarName": "", + "HCLName": "", + "Value": "#####" + }, { "CICDSecretName": "ARM_STORAGE_NAME", "EnvVarName": "TF_VAR_state_storage_account_name", diff --git a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env 
b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env index 67a4f2a7..4a49691a 100644 --- a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env +++ b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env @@ -2,6 +2,7 @@ ARM_CLIENT_ID=##### ARM_CLIENT_SECRET=##### ARM_DATALAKE_NAME=adsstgdlsadskhpvadsl ARM_DOMAIN=microsoft.com +ARM_FEATURE_TEMPLATE=##### ARM_IP_ADDRESS=144.138.148.220 ARM_IP_ADDRESS2=144.138.148.220 ARM_JUMPHOST_PASSWORD=##### diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.json b/solution/DeploymentV2/environments/vars/staging/common_vars.json index 7e3f1deb..a070b01a 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars.json +++ b/solution/DeploymentV2/environments/vars/staging/common_vars.json @@ -70,6 +70,7 @@ "ARM_CLIENT_SECRET": "#####", "ARM_DATALAKE_NAME": "adsstgdlsadskhpvadsl", "ARM_DOMAIN": "microsoft.com", + "ARM_FEATURE_TEMPLATE": "#####", "ARM_IP_ADDRESS": "144.138.148.220", "ARM_IP_ADDRESS2": "144.138.148.220", "ARM_JUMPHOST_PASSWORD": "#####", From 87a79f5080120891125128e21fbcc3f0ed069dff Mon Sep 17 00:00:00 2001 From: John Rampono Date: Wed, 27 Jul 2022 14:47:39 +0800 Subject: [PATCH 052/151] Updating Admin adn owners to lists and arrays --- .github/workflows/continuous-delivery.yml | 2 +- .../vars/common_vars_template.jsonnet | 6 +++ .../vars/staging/common_vars.json | 5 ++ .../vars/staging/common_vars_for_hcl.json | 5 ++ .../vars/staging/common_vars_values.jsonc | 18 +++++-- .../DeploymentV2/terraform_layer1/locals.tf | 7 ++- .../DeploymentV2/terraform_layer1/outputs.tf | 16 ++++-- .../DeploymentV2/terraform_layer1/purview.tf | 14 +++++ .../DeploymentV2/terraform_layer1/vars.tf | 31 ++++++++++- .../vars/local/terragrunt.hcl | 1 + .../vars/production/terragrunt.hcl | 1 + .../vars/staging/terragrunt.hcl | 1 + .../DeploymentV2/terraform_layer2/database.tf | 2 +- .../terraform_layer2/key_vault.tf | 54 +++---------------- 
.../DeploymentV2/terraform_layer2/outputs.tf | 2 +- .../DeploymentV2/terraform_layer2/purview.tf | 19 ------- .../terraform_layer2/storage_adls.tf | 10 ++-- .../terraform_layer2/storage_blob.tf | 10 ++-- .../DeploymentV2/terraform_layer2/synapse.tf | 23 ++++++++ .../DeploymentV2/terraform_layer2/vars.tf | 54 ++++++++++++++----- .../vars/staging/terragrunt.hcl | 1 + .../terraform_layer3/key_vault.tf | 29 ++++++++++ .../DeploymentV2/terraform_layer3/locals.tf | 6 ++- .../DeploymentV2/terraform_layer3/purview.tf | 4 ++ .../DeploymentV2/terraform_layer3/vars.tf | 10 +++- 25 files changed, 229 insertions(+), 102 deletions(-) create mode 100644 solution/DeploymentV2/terraform_layer1/purview.tf create mode 100644 solution/DeploymentV2/terraform_layer3/purview.tf diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 61e52fdb..a75c3f48 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,7 +3,7 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: feature-1.0.4 + branches: main jobs: deploy-to-env-one: diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 11146790..7573e726 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -160,6 +160,12 @@ local AllVariables = [ "HCLName": "synapse_administrators", "Value": locals[environment].synapse_administrators }, + { + "CICDSecretName": "", + "EnvVarName": "", + "HCLName": "resource_owners", + "Value": locals[environment].resource_owners + }, { "CICDSecretName": "ARM_FEATURE_TEMPLATE", "EnvVarName": "", diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.json b/solution/DeploymentV2/environments/vars/staging/common_vars.json index a070b01a..fbb0176e 100644 --- 
a/solution/DeploymentV2/environments/vars/staging/common_vars.json +++ b/solution/DeploymentV2/environments/vars/staging/common_vars.json @@ -59,8 +59,13 @@ "prefix": "ads", "resource_group_name": "gft2", "resource_location": "AustraliaEast", + "resource_owners": [ + "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "4c732d19-4076-4a76-87f3-6fbfd77f007d" + ], "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "synapse_administrators": { + "Agent_Deployer": "4c732d19-4076-4a76-87f3-6fbfd77f007d", "Jorampon": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47" diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json b/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json index 5d9310c0..e34e7f4f 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json @@ -8,8 +8,13 @@ "prefix": "ads", "resource_group_name": "gft2", "resource_location": "AustraliaEast", + "resource_owners": [ + "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "4c732d19-4076-4a76-87f3-6fbfd77f007d" + ], "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "synapse_administrators": { + "Agent_Deployer": "4c732d19-4076-4a76-87f3-6fbfd77f007d", "Jorampon": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47" diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 3108640a..b7b9ec1b 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -10,10 +10,22 @@ "ip_address": "144.138.148.220", "ip_address2": "144.138.148.220", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", - "synapse_administrators": - {"Jorampon":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578"}, 
+ "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_DATALAKE_NAME": "adsstgdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_DATALAKE_NAME": "adsstgdlsadskhpvadsl", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + + "resource_owners": + [ + /*Jorampon*/"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + /*Agent_Deployer*/ "4c732d19-4076-4a76-87f3-6fbfd77f007d" + ], + "synapse_administrators": + { + "Jorampon":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "Agent_Deployer": "4c732d19-4076-4a76-87f3-6fbfd77f007d" + }, + "synapse_publishers": {}, + "synapse_contributors": {} } \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer1/locals.tf b/solution/DeploymentV2/terraform_layer1/locals.tf index 2de29d5d..d35119fa 100644 --- a/solution/DeploymentV2/terraform_layer1/locals.tf +++ b/solution/DeploymentV2/terraform_layer1/locals.tf @@ -5,7 +5,12 @@ locals { functionapp_url = "https://${local.functionapp_name}.azurewebsites.net" aad_webapp_name = (var.aad_webapp_name != "" ? var.aad_webapp_name : "ADS GoFast Web Portal (${var.environment_tag})") aad_functionapp_name = (var.aad_functionapp_name != "" ? var.aad_functionapp_name : "ADS GoFast Orchestration App (${var.environment_tag})") - + purview_name = (var.purview_name != "" ? 
var.purview_name : "${var.prefix}${var.environment_tag}pur${var.app_name}${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_account_plink = (var.purview_name != "" ? var.purview_name : "${var.prefix}-${var.environment_tag}-pura-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_portal_plink = (var.purview_name != "" ? var.purview_name : "${var.prefix}-${var.environment_tag}-purp-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_resource_group_name = "managed-${module.naming.resource_group.name_unique}-purview" + purview_ir_app_reg_name = (var.purview_ir_app_reg_name != "" ? var.purview_ir_app_reg_name : "ADS GoFast Purview Integration Runtime (${var.environment_tag})") + tags = { Environment = var.environment_tag diff --git a/solution/DeploymentV2/terraform_layer1/outputs.tf b/solution/DeploymentV2/terraform_layer1/outputs.tf index 7e5c6035..ecedf7fb 100644 --- a/solution/DeploymentV2/terraform_layer1/outputs.tf +++ b/solution/DeploymentV2/terraform_layer1/outputs.tf @@ -37,9 +37,6 @@ output "aad_webreg_id" { value = var.deploy_web_app ? azuread_application.web_reg[0].application_id : "" } - - - output "webapp_name" { value = local.webapp_name } @@ -48,3 +45,16 @@ output "functionapp_name" { value = local.functionapp_name } +output "purview_name" { + value = local.purview_name +} +output "purview_sp_name" { + value = local.purview_ir_app_reg_name +} +output "purview_sp_id" { + value = var.deploy_purview && var.is_vnet_isolated ? azuread_application.purview_ir[0].application_id : "0" +} +output "purview_sp_object_id" { + value = var.deploy_purview && var.is_vnet_isolated ? 
azuread_application.purview_ir[0].object_id : "0" +} + diff --git a/solution/DeploymentV2/terraform_layer1/purview.tf b/solution/DeploymentV2/terraform_layer1/purview.tf new file mode 100644 index 00000000..96d1cdd9 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer1/purview.tf @@ -0,0 +1,14 @@ +// Create an IR service principal (private linked resources can't use the azure hosted IRs) + +resource "azuread_application" "purview_ir" { + count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 + display_name = local.purview_ir_app_reg_name + owners = var.resource_owners +} + +resource "azuread_service_principal" "purview_ir" { + count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 + application_id = azuread_application.purview_ir[0].application_id + owners = var.resource_owners +} + diff --git a/solution/DeploymentV2/terraform_layer1/vars.tf b/solution/DeploymentV2/terraform_layer1/vars.tf index 81c9fc91..6a550929 100644 --- a/solution/DeploymentV2/terraform_layer1/vars.tf +++ b/solution/DeploymentV2/terraform_layer1/vars.tf @@ -89,12 +89,28 @@ variable "aad_functionapp_name" { type = string } +variable "purview_name" { + description = "The override name for the Purview component. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "purview_ir_app_reg_name" { + description = "The override name for the Purview Integration runtime SP. 
If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} + #--------------------------------------------------------------- # Feature Toggles #--------------------------------------------------------------- +variable "is_vnet_isolated" { + description = "Whether to deploy the resources as vnet attached / private linked" + default = true + type = bool +} variable "deploy_web_app" { description = "Feature toggle for deploying the Web App" @@ -107,7 +123,6 @@ variable "deploy_function_app" { type = bool } - variable "deploy_azure_ad_web_app_registration" { description = "Feature toggle for deploying the Azure AD App registration for the Web Portal" default = true @@ -117,4 +132,16 @@ variable "deploy_azure_ad_function_app_registration" { description = "Feature toggle for deploying the Azure AD App registration for the Function App" default = true type = bool -} \ No newline at end of file +} + +variable "deploy_purview" { + description = "Feature toggle for deploying Azure Purview" + default = false + type = bool +} + +variable "resource_owners" { + description = "A web app Azure security group used for admin access." + default = [] + type = list(string) +} diff --git a/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl index d132c24e..5a332505 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl @@ -32,6 +32,7 @@ inputs = { owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
+ resource_owners = "${local.common_vars.resource_owners}" deploy_web_app = true deploy_function_app = true } diff --git a/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl index c0f91719..6f634c3e 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl @@ -32,6 +32,7 @@ inputs = { owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + resource_owners = "${local.common_vars.resource_owners}" deploy_web_app = true deploy_function_app = true } diff --git a/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl index e4028b4a..7964b99b 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl @@ -32,6 +32,7 @@ inputs = { owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
+ resource_owners = "${local.common_vars.resource_owners}" deploy_web_app = true deploy_function_app = true } diff --git a/solution/DeploymentV2/terraform_layer2/database.tf b/solution/DeploymentV2/terraform_layer2/database.tf index 62d9e460..2e6d7dc4 100644 --- a/solution/DeploymentV2/terraform_layer2/database.tf +++ b/solution/DeploymentV2/terraform_layer2/database.tf @@ -26,7 +26,7 @@ resource "azurerm_mssql_server" "sqlserver" { azuread_administrator { login_username = "sqladmin" - object_id = data.azurerm_client_config.current.object_id + object_id = var.resource_owners[0] } identity { type = "SystemAssigned" diff --git a/solution/DeploymentV2/terraform_layer2/key_vault.tf b/solution/DeploymentV2/terraform_layer2/key_vault.tf index 5a07b548..9d1bfcd9 100644 --- a/solution/DeploymentV2/terraform_layer2/key_vault.tf +++ b/solution/DeploymentV2/terraform_layer2/key_vault.tf @@ -24,11 +24,13 @@ resource "azurerm_key_vault" "app_vault" { // Grant secret and key access to the current app to store the secret values -------------------------- // Allows the deployment service principal to compare / check state later -resource "azurerm_key_vault_access_policy" "user_access" { - count = (var.cicd_sp_id == data.azurerm_client_config.current.object_id ? 0 : 1) +resource "azurerm_key_vault_access_policy" "cicd_and_admin_access" { + for_each = { + for ro in var.resource_owners : ro => ro + } key_vault_id = azurerm_key_vault.app_vault.id tenant_id = data.azurerm_client_config.current.tenant_id - object_id = data.azurerm_client_config.current.object_id + object_id = each.value key_permissions = [ "Delete", "List", "Get", "Create", "Update", "Purge" @@ -42,23 +44,6 @@ resource "azurerm_key_vault_access_policy" "user_access" { ] } -resource "azurerm_key_vault_access_policy" "cicd_access" { - count = (var.cicd_sp_id == "" ? 
0 : 1) - key_vault_id = azurerm_key_vault.app_vault.id - tenant_id = data.azurerm_client_config.current.tenant_id - object_id = (var.cicd_sp_id == data.azurerm_client_config.current.object_id ? var.cicd_sp_id : data.azurerm_client_config.current.object_id) - - key_permissions = [ - "Delete", "List", "Get", "Create", "Update", "Purge" - ] - - secret_permissions = [ - "Delete", "List", "Get", "Set", "Purge" - ] - depends_on = [ - azurerm_key_vault.app_vault, - ] -} resource "azurerm_key_vault_access_policy" "cicd_access_layers1and3" { count = (var.deployment_principal_layers1and3 == "" ? 0 : 1) @@ -79,7 +64,7 @@ resource "azurerm_key_vault_access_policy" "cicd_access_layers1and3" { } resource "time_sleep" "cicd_access" { - depends_on = [azurerm_key_vault_access_policy.cicd_access, azurerm_key_vault_access_policy.user_access] + depends_on = [azurerm_key_vault_access_policy.cicd_and_admin_access] create_duration = "10s" } @@ -102,24 +87,6 @@ resource "azurerm_key_vault_access_policy" "adf_access" { ] } -// Allows purview to retrieve the IR service principal password -resource "azurerm_key_vault_access_policy" "purview_access" { - count = var.deploy_purview ? 1 : 0 - key_vault_id = azurerm_key_vault.app_vault.id - tenant_id = var.tenant_id - object_id = azurerm_purview_account.purview[0].identity[0].principal_id - - key_permissions = [ - "Get", "List" - ] - - secret_permissions = [ - "List", "Get" - ] - depends_on = [ - azurerm_key_vault.app_vault, - ] -} // Allows the Azure function to retrieve the Function App - AAD App Reg - Client Secret resource "azurerm_key_vault_access_policy" "function_app" { @@ -253,15 +220,6 @@ resource "azurerm_key_vault_secret" "function_app_key" { ] } -resource "azurerm_key_vault_secret" "purview_ir_sp_password" { - count = var.deploy_purview && var.is_vnet_isolated ? 
1 : 0 - name = "AzurePurviewIr" - value = azuread_application_password.purview_ir[0].value - key_vault_id = azurerm_key_vault.app_vault.id - depends_on = [ - time_sleep.cicd_access, - ] -} resource "azurerm_key_vault_secret" "selfhostedsql_password" { count = var.deploy_selfhostedsql ? 1 : 0 diff --git a/solution/DeploymentV2/terraform_layer2/outputs.tf b/solution/DeploymentV2/terraform_layer2/outputs.tf index 1b140392..6419739f 100644 --- a/solution/DeploymentV2/terraform_layer2/outputs.tf +++ b/solution/DeploymentV2/terraform_layer2/outputs.tf @@ -79,7 +79,7 @@ output "aad_funcreg_id" { value = data.terraform_remote_state.layer1.outputs.aad_funcreg_id } output "purview_sp_id" { - value = var.deploy_purview && var.is_vnet_isolated ? azuread_application.purview_ir[0].application_id : "0" + value = data.terraform_remote_state.layer1.outputs.purview_sp_id } output "integration_runtimes" { value = local.integration_runtimes diff --git a/solution/DeploymentV2/terraform_layer2/purview.tf b/solution/DeploymentV2/terraform_layer2/purview.tf index 017c00e3..8fc48d46 100644 --- a/solution/DeploymentV2/terraform_layer2/purview.tf +++ b/solution/DeploymentV2/terraform_layer2/purview.tf @@ -99,22 +99,3 @@ module "purview_ingestion_private_endpoints" { name_suffix = random_id.rg_deployment_unique.id subscription_id = var.subscription_id } - -// Create an IR service principal (private linked resources can't use the azure hosted IRs) -resource "azuread_application" "purview_ir" { - count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 - display_name = local.purview_ir_app_reg_name - owners = [data.azurerm_client_config.current.object_id] -} - -resource "azuread_service_principal" "purview_ir" { - count = var.deploy_purview && var.is_vnet_isolated ? 
1 : 0 - application_id = azuread_application.purview_ir[0].application_id - owners = [data.azurerm_client_config.current.object_id] -} - - -resource "azuread_application_password" "purview_ir" { - count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 - application_object_id = azuread_application.purview_ir[0].object_id -} diff --git a/solution/DeploymentV2/terraform_layer2/storage_adls.tf b/solution/DeploymentV2/terraform_layer2/storage_adls.tf index 899fdb5e..118bf5e9 100644 --- a/solution/DeploymentV2/terraform_layer2/storage_adls.tf +++ b/solution/DeploymentV2/terraform_layer2/storage_adls.tf @@ -24,11 +24,13 @@ resource "azurerm_storage_account" "adls" { } -resource "azurerm_role_assignment" "adls_deployment_agent" { - count = var.deploy_adls ? 1 : 0 +resource "azurerm_role_assignment" "adls_deployment_agents" { + for_each = { + for ro in var.resource_owners : ro => ro + } scope = azurerm_storage_account.adls[0].id role_definition_name = "Storage Blob Data Contributor" - principal_id = data.azurerm_client_config.current.object_id + principal_id = each.value } resource "azurerm_role_assignment" "adls_function_app" { @@ -56,7 +58,7 @@ resource "azurerm_role_assignment" "adls_purview_sp" { count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 scope = azurerm_storage_account.adls[0].id role_definition_name = "Storage Blob Data Contributor" - principal_id = azuread_service_principal.purview_ir[0].object_id + principal_id = data.terraform_remote_state.layer1.outputs.purview_sp_object_id } diff --git a/solution/DeploymentV2/terraform_layer2/storage_blob.tf b/solution/DeploymentV2/terraform_layer2/storage_blob.tf index d1f79477..84eb1732 100644 --- a/solution/DeploymentV2/terraform_layer2/storage_blob.tf +++ b/solution/DeploymentV2/terraform_layer2/storage_blob.tf @@ -25,11 +25,13 @@ resource "azurerm_storage_account" "blob" { } } -resource "azurerm_role_assignment" "blob_deployment_agent" { - count = var.deploy_storage_account ? 
1 : 0 +resource "azurerm_role_assignment" "blob_deployment_agents" { + for_each = { + for ro in var.resource_owners : ro => ro + } scope = azurerm_storage_account.blob[0].id role_definition_name = "Storage Blob Data Contributor" - principal_id = data.azurerm_client_config.current.object_id + principal_id = each.value } resource "azurerm_role_assignment" "blob_function_app" { @@ -50,7 +52,7 @@ resource "azurerm_role_assignment" "blob_purview_sp" { count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 scope = azurerm_storage_account.blob[0].id role_definition_name = "Storage Blob Data Contributor" - principal_id = azuread_service_principal.purview_ir[0].object_id + principal_id = data.terraform_remote_state.layer1.outputs.purview_sp_object_id } diff --git a/solution/DeploymentV2/terraform_layer2/synapse.tf b/solution/DeploymentV2/terraform_layer2/synapse.tf index 942c7487..b52245d3 100644 --- a/solution/DeploymentV2/terraform_layer2/synapse.tf +++ b/solution/DeploymentV2/terraform_layer2/synapse.tf @@ -179,6 +179,29 @@ resource "azurerm_synapse_role_assignment" "synapse_admin_assignments" { ] } +resource "azurerm_synapse_role_assignment" "synapse_contributor_assignments" { + for_each = ( var.synapse_contributors) + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + role_name = "Synapse Contributor" + principal_id = each.value + depends_on = [ + azurerm_synapse_firewall_rule.public_access, + time_sleep.azurerm_synapse_firewall_rule_wait_30_seconds_cicd + ] +} + +resource "azurerm_synapse_role_assignment" "synapse_publisher_assignments" { + for_each = ( var.synapse_publishers) + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + role_name = "Synapse Artifact Publisher" + principal_id = each.value + depends_on = [ + azurerm_synapse_firewall_rule.public_access, + time_sleep.azurerm_synapse_firewall_rule_wait_30_seconds_cicd + ] +} + + resource "azurerm_synapse_linked_service" "synapse_keyvault_linkedservice" { diff --git 
a/solution/DeploymentV2/terraform_layer2/vars.tf b/solution/DeploymentV2/terraform_layer2/vars.tf index a739158c..7e424492 100644 --- a/solution/DeploymentV2/terraform_layer2/vars.tf +++ b/solution/DeploymentV2/terraform_layer2/vars.tf @@ -355,11 +355,6 @@ variable "publish_datafactory_pipelines" { type = bool } -variable "publish_web_app_addcurrentuserasadmin" { - description = "Feature toggle for adding user running deployment as a webapp admin" - default = false - type = bool -} variable "publish_sif_database" { description = "Feature toggle for Publishing SIF Database" @@ -800,12 +795,6 @@ variable "existing_synapse_private_link_hub_id" { type = string } -variable "web_app_admin_security_group" { - description = "A web app Azure security group used for admin access." - default = "" - type = string -} - variable "custom_vm_plan_name" { description = "An Azure vm plan name to be referenced for a custom vm image." default = "" @@ -842,6 +831,13 @@ variable "custom_vm_image_version" { type = string } + + + +#--------------------------------------------------------------- +# User Access and Ownership/ +#--------------------------------------------------------------- + variable "deployment_principal_layers1and3" { description = "Object Id of the azure account that will deploy layers 1 & 3. If it is the same as the layer 2 user then leave as empty string." 
default = "" @@ -851,6 +847,38 @@ variable "deployment_principal_layers1and3" { variable "synapse_administrators" { description = "List of Synapse Administrators" type = map(string) - default = { - } + default = {} } + +variable "synapse_contributors" { + description = "List of Synapse Contributors" + type = map(string) + default = {} +} + +variable "synapse_publishers" { + description = "List of Synapse Publishers" + type = map(string) + default = {} +} + +variable "publish_web_app_addcurrentuserasadmin" { + description = "Feature toggle for adding user running deployment as a webapp admin" + default = false + type = bool +} + + +variable "web_app_admin_security_group" { + description = "A web app Azure security group used for admin access." + default = "" + type = string +} + + +variable "resource_owners" { + description = "A web app Azure security group used for admin access." + default = [] + type = list(string) +} + diff --git a/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl index 92c7093e..5cde3485 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl @@ -52,6 +52,7 @@ inputs = { environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. synapse_administrators = "${local.common_vars.synapse_administrators}" + resource_owners = "${local.common_vars.resource_owners}" deploy_web_app = true deploy_function_app = true deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. 
diff --git a/solution/DeploymentV2/terraform_layer3/key_vault.tf b/solution/DeploymentV2/terraform_layer3/key_vault.tf index 2be0a6d5..c0458ecb 100644 --- a/solution/DeploymentV2/terraform_layer3/key_vault.tf +++ b/solution/DeploymentV2/terraform_layer3/key_vault.tf @@ -1,3 +1,32 @@ +// Allows purview to retrieve the IR service principal password +resource "azurerm_key_vault_access_policy" "purview_access" { + count = var.deploy_purview ? 1 : 0 + key_vault_id = azurerm_key_vault.app_vault.id + tenant_id = var.tenant_id + object_id = azurerm_purview_account.purview[0].identity[0].principal_id + + key_permissions = [ + "Get", "List" + ] + + secret_permissions = [ + "List", "Get" + ] + depends_on = [ + azurerm_key_vault.app_vault, + ] +} + +resource "azurerm_key_vault_secret" "purview_ir_sp_password" { + count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 + name = "AzurePurviewIr" + value = azuread_application_password.purview_ir[0].value + key_vault_id = azurerm_key_vault.app_vault.id + depends_on = [ + time_sleep.cicd_access, + ] +} + resource "azurerm_key_vault_secret" "azure_function_secret" { count = var.deploy_function_app ? 1 : 0 name = "AzureFunctionClientSecret" diff --git a/solution/DeploymentV2/terraform_layer3/locals.tf b/solution/DeploymentV2/terraform_layer3/locals.tf index 2de29d5d..88294952 100644 --- a/solution/DeploymentV2/terraform_layer3/locals.tf +++ b/solution/DeploymentV2/terraform_layer3/locals.tf @@ -5,7 +5,11 @@ locals { functionapp_url = "https://${local.functionapp_name}.azurewebsites.net" aad_webapp_name = (var.aad_webapp_name != "" ? var.aad_webapp_name : "ADS GoFast Web Portal (${var.environment_tag})") aad_functionapp_name = (var.aad_functionapp_name != "" ? var.aad_functionapp_name : "ADS GoFast Orchestration App (${var.environment_tag})") - + purview_name = (var.purview_name != "" ? 
var.purview_name : "${var.prefix}${var.environment_tag}pur${var.app_name}${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_account_plink = (var.purview_name != "" ? var.purview_name : "${var.prefix}-${var.environment_tag}-pura-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_portal_plink = (var.purview_name != "" ? var.purview_name : "${var.prefix}-${var.environment_tag}-purp-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_resource_group_name = "managed-${module.naming.resource_group.name_unique}-purview" + purview_ir_app_reg_name = (var.purview_ir_app_reg_name != "" ? var.purview_ir_app_reg_name : "ADS GoFast Purview Integration Runtime (${var.environment_tag})") tags = { Environment = var.environment_tag diff --git a/solution/DeploymentV2/terraform_layer3/purview.tf b/solution/DeploymentV2/terraform_layer3/purview.tf new file mode 100644 index 00000000..cff9c1e8 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/purview.tf @@ -0,0 +1,4 @@ +resource "azuread_application_password" "purview_ir" { + count = var.deploy_purview && var.is_vnet_isolated ? 
1 : 0 + application_object_id = azuread_application.purview_ir[0].object_id +} diff --git a/solution/DeploymentV2/terraform_layer3/vars.tf b/solution/DeploymentV2/terraform_layer3/vars.tf index 384a9958..954dcfa5 100644 --- a/solution/DeploymentV2/terraform_layer3/vars.tf +++ b/solution/DeploymentV2/terraform_layer3/vars.tf @@ -118,4 +118,12 @@ variable "deploy_azure_ad_function_app_registration" { description = "Feature toggle for deploying the Azure AD App registration for the Function App" default = true type = bool -} \ No newline at end of file +} + + +variable "resource_owners" { + description = "A web app Azure security group used for admin access." + default = { + } + type = map(string) +} From f86f686d9bc721a1214cc206a5ff08dc09d53892 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 28 Jul 2022 08:12:46 +0800 Subject: [PATCH 053/151] Separated Purview AAD Registrations into Layers --- solution/DeploymentV2/Deploy.ps1 | 8 ++--- solution/DeploymentV2/Deploy_1_Infra0.ps1 | 6 ++-- solution/DeploymentV2/Prepare.ps1 | 36 ++++++++++--------- .../vars/common_vars_template.jsonnet | 8 ++++- .../vars/staging/GetSecretsTemplate.env | 8 ++--- .../vars/staging/common_vars.json | 24 +++++++------ .../vars/staging/common_vars_for_hcl.json | 6 ++-- .../vars/staging/common_vars_values.jsonc | 29 ++++++--------- .../DeploymentV2/terraform_layer1/outputs.tf | 2 +- .../DeploymentV2/terraform_layer1/purview.tf | 4 +-- .../DeploymentV2/terraform_layer2/.tflint.hcl | 5 +++ .../DeploymentV2/terraform_layer2/layer1.tf | 4 +-- .../DeploymentV2/terraform_layer2/outputs.tf | 8 +++++ .../vars/local/terragrunt.hcl | 3 +- .../vars/production/terragrunt.hcl | 3 +- .../terraform_layer3/key_vault.tf | 15 ++++---- .../DeploymentV2/terraform_layer3/layer2.tf | 4 +-- .../DeploymentV2/terraform_layer3/locals.tf | 8 ++--- .../DeploymentV2/terraform_layer3/purview.tf | 4 +-- .../DeploymentV2/terraform_layer3/vars.tf | 14 ++++++-- .../vars/staging/terragrunt.hcl | 1 + 21 files changed, 114 
insertions(+), 86 deletions(-) create mode 100644 solution/DeploymentV2/terraform_layer2/.tflint.hcl diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 52599f3a..41f38aa3 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -22,13 +22,13 @@ #---------------------------------------------------------------------------------------------------------------- param ( [Parameter(Mandatory=$false)] - [bool]$RunTerraformLayer1=$false, + [bool]$RunTerraformLayer1=0, [Parameter(Mandatory=$false)] - [bool]$RunTerraformLayer2=$false, + [bool]$RunTerraformLayer2=0, [Parameter(Mandatory=$false)] - [bool]$RunTerraformLayer3=$false, + [bool]$RunTerraformLayer3=0, [Parameter(Mandatory=$false)] - [string]$FeatureTemplate="full_deployment" + [string]$FeatureTemplate="basic_deployment" ) #------------------------------------------------------------------------------------------------------------ # Preparation #Mandatory diff --git a/solution/DeploymentV2/Deploy_1_Infra0.ps1 b/solution/DeploymentV2/Deploy_1_Infra0.ps1 index aedce7c9..520bb6a4 100644 --- a/solution/DeploymentV2/Deploy_1_Infra0.ps1 +++ b/solution/DeploymentV2/Deploy_1_Infra0.ps1 @@ -1,6 +1,6 @@ param ( [Parameter(Mandatory=$false)] - [System.Boolean]$skipTerraformDeployment=$true, + [System.Boolean]$skipTerraformDeployment=$false, [Parameter(Mandatory=$false)] [System.Boolean]$RunTerraformLayer1=$false, [Parameter(Mandatory=$false)] @@ -8,7 +8,9 @@ param ( [Parameter(Mandatory=$false)] [System.Boolean]$RunTerraformLayer3=$false ) - +#Write-Host $RunTerraformLayer1 +#Write-Host $RunTerraformLayer2 +#Write-Host $RunTerraformLayer3 #---------------------------------------------------------------------------------------------------------------- # Deploy Infrastructure #---------------------------------------------------------------------------------------------------------------- diff --git a/solution/DeploymentV2/Prepare.ps1 
b/solution/DeploymentV2/Prepare.ps1 index c2d6707e..8129d356 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -25,7 +25,7 @@ #by default $gitDeploy will not be true, only being set by the git environment - meaning if not using a runner it will default to a standard execution. $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') - +$deploymentFolderPath = (Get-Location).Path $envlist = (Get-ChildItem -Directory -Path ./environments/vars | Select-Object -Property Name).Name if ($gitDeploy) @@ -216,8 +216,10 @@ else ##Exit } + if ($PersistEnv -eq "Yes") { + $common_vars_values = Get-Content ./environments/vars/$environmentName/common_vars_values.jsonc | ConvertFrom-Json -Depth 10 $common_vars_values.resource_group_name = $env:TF_VAR_resource_group_name $common_vars_values.domain = $env:TF_VAR_domain @@ -225,8 +227,7 @@ else $common_vars_values.ip_address2 = $env:TF_VAR_ip_address $common_vars_values.tenant_id = $env:TF_VAR_tenant_id $common_vars_values.WEB_APP_ADMIN_USER = (az ad signed-in-user show --query id -o tsv) - $common_vars_values.deployment_principal_layers1and3 = $common_vars_values.WEB_APP_ADMIN_USER - $common_vars_values.synapse_administrators = $common_vars_values.deployment_principal_layers1and3 + $common_vars_values.deployment_principal_layers1and3 = $common_vars_values.WEB_APP_ADMIN_USER $foundUser = $false foreach($u in $common_vars_values.synapse_administrators) @@ -237,35 +238,38 @@ else break } } - if($foundUser -eq $false) + if($foundUser -eq $true) { - $common_vars_values.synapse_administrators | Add-Member -Name $common_vars_values.WEB_APP_ADMIN_USER -Value $common_vars_values.WEB_APP_ADMIN_USER -Type NoteProperty + $common_vars_values.synapse_administrators.Deploy_User = $common_vars_values.WEB_APP_ADMIN_USER } - - $fts = (Get-ChildItem -Path ./environments/featuretemplates | Select-Object -Property Name).Name.replace(".jsonc","") - 
#------------------------------------------------------------------------------------------------------------ - # Templated Configurations - #------------------------------------------------------------------------------------------------------------ + $common_vars_values | Convertto-Json -Depth 10 | Set-Content ./environments/vars/$environmentName/common_vars_values.jsonc + + if($environmentName -eq "admz") { Exit } + #------------------------------------------------------------------------------------------------------------ + # Templated Configurations + #------------------------------------------------------------------------------------------------------------ + $fts = (Get-ChildItem -Path ./environments/featuretemplates | Select-Object -Property Name).Name.replace(".jsonc","") $templateName = Get-SelectionFromUser -Options ($fts) -Prompt "Select deployment fast start template" if ($templateName -eq "Quit") { Exit } - - Set-Location ./environments/vars/ - ./PreprocessEnvironment.ps1 -Environment $environmentName -FeatureTemplate $templateName -gitDeploy $gitDeploy - Set-Location $deploymentFolderPath - - + else + { + Set-Location ./environments/vars/ + ./PreprocessEnvironment.ps1 -Environment $environmentName -FeatureTemplate $templateName -gitDeploy $gitDeploy + } } } +Set-Location $deploymentFolderPath + diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 7573e726..37f3e3f2 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -90,6 +90,12 @@ local AllVariables = [ "HCLName": "tenant_id", "Value": locals[environment].tenant_id }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_tenant_id", + "HCLName": "", + "Value": locals[environment].tenant_id + }, /* HCL Common Vars & Terraform Customisations @@ -126,7 +132,7 @@ local AllVariables = [ }, { 
"CICDSecretName": "", - "EnvVarName": "", + "EnvVarName": "TF_VAR_subscription_id", "HCLName": "subscription_id", "Value": locals[environment].subscription_id }, diff --git a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env index 4a49691a..5349b4ed 100644 --- a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env +++ b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env @@ -1,18 +1,18 @@ ARM_CLIENT_ID=##### ARM_CLIENT_SECRET=##### -ARM_DATALAKE_NAME=adsstgdlsadskhpvadsl +ARM_DATALAKE_NAME=adsstgdlsadsvrojadsl ARM_DOMAIN=microsoft.com ARM_FEATURE_TEMPLATE=##### ARM_IP_ADDRESS=144.138.148.220 ARM_IP_ADDRESS2=144.138.148.220 ARM_JUMPHOST_PASSWORD=##### -ARM_KEYVAULT_NAME=ads-stg-kv-ads-khpv +ARM_KEYVAULT_NAME=ads-stg-kv-ads-vroj ARM_PAL_PARTNER_ID=##### -ARM_RESOURCE_GROUP_NAME=gft2 +ARM_RESOURCE_GROUP_NAME=gft3 ARM_STORAGE_NAME=##### ARM_SUBSCRIPTION_ID=##### ARM_SYNAPSE_PASSWORD=##### -ARM_SYNAPSE_WORKSPACE_NAME=adsstgsynwadskhpv +ARM_SYNAPSE_WORKSPACE_NAME=adsstgsynwadsvroj ARM_TENANT_ID=72f988bf-86f1-41af-91ab-2d7cd011db47 ENVIRONMENT_TAG=stg GIT_ADF_EMAIL_ADDRESS=##### diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.json b/solution/DeploymentV2/environments/vars/staging/common_vars.json index fbb0176e..fcea6cff 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars.json +++ b/solution/DeploymentV2/environments/vars/staging/common_vars.json @@ -4,7 +4,7 @@ "ARM_CLIENT_SECRET": "#####", "ARM_PAL_PARTNER_ID": "#####", "ARM_SUBSCRIPTION_ID": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadskhpv", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsvroj", "ARM_TENANT_ID": "72f988bf-86f1-41af-91ab-2d7cd011db47", "TF_VAR_adf_git_email_address": "#####", "TF_VAR_adf_git_pat": "#####", @@ -36,18 +36,20 @@ "TF_VAR_publish_sif_database": true, "TF_VAR_publish_web_app": true, 
"TF_VAR_publish_web_app_addcurrentuserasadmin": true, - "TF_VAR_resource_group_name": "gft2", + "TF_VAR_resource_group_name": "gft3", "TF_VAR_state_storage_account_name": "#####", + "TF_VAR_subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "TF_VAR_synapse_git_email_address": "#####", "TF_VAR_synapse_git_pat": "#####", "TF_VAR_synapse_git_repository_branch_name": "#####", "TF_VAR_synapse_git_repository_name": "#####", "TF_VAR_synapse_git_user_name": "#####", "TF_VAR_synapse_sql_password": "#####", + "TF_VAR_tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", "TF_VAR_web_app_admin_security_group": "#####", "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "datalakeName": "adsstgdlsadskhpvadsl", - "keyVaultName": "ads-stg-kv-ads-khpv" + "datalakeName": "adsstgdlsadsvrojadsl", + "keyVaultName": "ads-stg-kv-ads-vroj" }, "ForHCL": { "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", @@ -57,7 +59,7 @@ "ip_address2": "144.138.148.220", "owner_tag": "Contoso", "prefix": "ads", - "resource_group_name": "gft2", + "resource_group_name": "gft3", "resource_location": "AustraliaEast", "resource_owners": [ "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", @@ -65,27 +67,27 @@ ], "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "synapse_administrators": { - "Agent_Deployer": "4c732d19-4076-4a76-87f3-6fbfd77f007d", - "Jorampon": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d", + "deploy_user": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47" }, "ForSecretFile": { "ARM_CLIENT_ID": "#####", "ARM_CLIENT_SECRET": "#####", - "ARM_DATALAKE_NAME": "adsstgdlsadskhpvadsl", + "ARM_DATALAKE_NAME": "adsstgdlsadsvrojadsl", "ARM_DOMAIN": "microsoft.com", "ARM_FEATURE_TEMPLATE": "#####", "ARM_IP_ADDRESS": "144.138.148.220", "ARM_IP_ADDRESS2": "144.138.148.220", "ARM_JUMPHOST_PASSWORD": "#####", - "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-khpv", + 
"ARM_KEYVAULT_NAME": "ads-stg-kv-ads-vroj", "ARM_PAL_PARTNER_ID": "#####", - "ARM_RESOURCE_GROUP_NAME": "gft2", + "ARM_RESOURCE_GROUP_NAME": "gft3", "ARM_STORAGE_NAME": "#####", "ARM_SUBSCRIPTION_ID": "#####", "ARM_SYNAPSE_PASSWORD": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadskhpv", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsvroj", "ARM_TENANT_ID": "72f988bf-86f1-41af-91ab-2d7cd011db47", "ENVIRONMENT_TAG": "stg", "GIT_ADF_EMAIL_ADDRESS": "#####", diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json b/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json index e34e7f4f..75527720 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json @@ -6,7 +6,7 @@ "ip_address2": "144.138.148.220", "owner_tag": "Contoso", "prefix": "ads", - "resource_group_name": "gft2", + "resource_group_name": "gft3", "resource_location": "AustraliaEast", "resource_owners": [ "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", @@ -14,8 +14,8 @@ ], "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "synapse_administrators": { - "Agent_Deployer": "4c732d19-4076-4a76-87f3-6fbfd77f007d", - "Jorampon": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d", + "deploy_user": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47" } diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index b7b9ec1b..ba5fc00c 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -6,26 +6,19 @@ "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", - "resource_group_name": "gft2", + "resource_group_name": 
"gft3", "ip_address": "144.138.148.220", "ip_address2": "144.138.148.220", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", - - "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_DATALAKE_NAME": "adsstgdlsadskhpvadsl", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - - "resource_owners": - [ - /*Jorampon*/"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - /*Agent_Deployer*/ "4c732d19-4076-4a76-87f3-6fbfd77f007d" - ], - "synapse_administrators": - { - "Jorampon":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "Agent_Deployer": "4c732d19-4076-4a76-87f3-6fbfd77f007d" - }, + "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsvroj", + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-vroj", + "ARM_DATALAKE_NAME": "adsstgdlsadsvrojadsl", + "resource_owners": [ + "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "4c732d19-4076-4a76-87f3-6fbfd77f007d" + ], + "synapse_administrators": {"deploy_user": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d"}, "synapse_publishers": {}, "synapse_contributors": {} -} \ No newline at end of file +} diff --git a/solution/DeploymentV2/terraform_layer1/outputs.tf b/solution/DeploymentV2/terraform_layer1/outputs.tf index ecedf7fb..83f06f9e 100644 --- a/solution/DeploymentV2/terraform_layer1/outputs.tf +++ b/solution/DeploymentV2/terraform_layer1/outputs.tf @@ -54,7 +54,7 @@ output "purview_sp_name" { output "purview_sp_id" { value = var.deploy_purview && var.is_vnet_isolated ? 
azuread_application.purview_ir[0].application_id : "0" } + output "purview_sp_object_id" { value = var.deploy_purview && var.is_vnet_isolated ? azuread_application.purview_ir[0].object_id : "0" } - diff --git a/solution/DeploymentV2/terraform_layer1/purview.tf b/solution/DeploymentV2/terraform_layer1/purview.tf index 96d1cdd9..6fffbbdd 100644 --- a/solution/DeploymentV2/terraform_layer1/purview.tf +++ b/solution/DeploymentV2/terraform_layer1/purview.tf @@ -1,5 +1,4 @@ // Create an IR service principal (private linked resources can't use the azure hosted IRs) - resource "azuread_application" "purview_ir" { count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 display_name = local.purview_ir_app_reg_name @@ -10,5 +9,4 @@ resource "azuread_service_principal" "purview_ir" { count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 application_id = azuread_application.purview_ir[0].application_id owners = var.resource_owners -} - +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/.tflint.hcl b/solution/DeploymentV2/terraform_layer2/.tflint.hcl new file mode 100644 index 00000000..2a61e067 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/.tflint.hcl @@ -0,0 +1,5 @@ +plugin "azurerm" { + enabled = true + version = "0.17.0" + source = "github.com/terraform-linters/tflint-ruleset-azurerm" +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/layer1.tf b/solution/DeploymentV2/terraform_layer2/layer1.tf index aa9dd0c6..7eef56af 100644 --- a/solution/DeploymentV2/terraform_layer2/layer1.tf +++ b/solution/DeploymentV2/terraform_layer2/layer1.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer1.tfstate" - resource_group_name = "gft2" - storage_account_name = "gft2state" + resource_group_name = "gft3" + storage_account_name = "gft3state" } } diff --git a/solution/DeploymentV2/terraform_layer2/outputs.tf b/solution/DeploymentV2/terraform_layer2/outputs.tf index 
6419739f..8d478033 100644 --- a/solution/DeploymentV2/terraform_layer2/outputs.tf +++ b/solution/DeploymentV2/terraform_layer2/outputs.tf @@ -261,6 +261,14 @@ output "azuread_application_function_app_reg_object_id" { value = data.terraform_remote_state.layer1.outputs.azuread_application_function_app_reg_object_id } +output "purview_sp_object_id" { + value = data.terraform_remote_state.layer1.outputs.purview_sp_object_id +} + +output "purview_account_principal_id" { + value = var.deploy_purview && var.is_vnet_isolated ? azurerm_purview_account.purview[0].identity[0].principal_id : "0" +} + /*Variables for Naming Module*/ output "naming_unique_seed" { value = data.terraform_remote_state.layer1.outputs.naming_unique_seed diff --git a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl index 2e3a2bec..e79bc119 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl @@ -51,7 +51,8 @@ inputs = { owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. - synapse_administrators = "${local.common_vars.synapse_administrators}" + synapse_administrators = "${local.common_vars.synapse_administrators}" + resource_owners = "${local.common_vars.resource_owners}" deploy_web_app = true deploy_function_app = true deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. 
diff --git a/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl index 39a7f3fa..02fb6fae 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl @@ -51,7 +51,8 @@ inputs = { owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. - synapse_administrators = "${local.common_vars.synapse_administrators}" + synapse_administrators = "${local.common_vars.synapse_administrators}" + resource_owners = "${local.common_vars.resource_owners}" deploy_web_app = true deploy_function_app = true deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. diff --git a/solution/DeploymentV2/terraform_layer3/key_vault.tf b/solution/DeploymentV2/terraform_layer3/key_vault.tf index c0458ecb..46efe493 100644 --- a/solution/DeploymentV2/terraform_layer3/key_vault.tf +++ b/solution/DeploymentV2/terraform_layer3/key_vault.tf @@ -1,9 +1,10 @@ // Allows purview to retrieve the IR service principal password resource "azurerm_key_vault_access_policy" "purview_access" { count = var.deploy_purview ? 
1 : 0 - key_vault_id = azurerm_key_vault.app_vault.id + key_vault_id = data.terraform_remote_state.layer2.outputs.azurerm_key_vault_app_vault_id tenant_id = var.tenant_id - object_id = azurerm_purview_account.purview[0].identity[0].principal_id + object_id = data.terraform_remote_state.layer2.outputs.purview_account_principal_id + key_permissions = [ "Get", "List" @@ -12,19 +13,15 @@ resource "azurerm_key_vault_access_policy" "purview_access" { secret_permissions = [ "List", "Get" ] - depends_on = [ - azurerm_key_vault.app_vault, - ] + depends_on = [] } resource "azurerm_key_vault_secret" "purview_ir_sp_password" { count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 name = "AzurePurviewIr" value = azuread_application_password.purview_ir[0].value - key_vault_id = azurerm_key_vault.app_vault.id - depends_on = [ - time_sleep.cicd_access, - ] + key_vault_id = data.terraform_remote_state.layer2.outputs.azurerm_key_vault_app_vault_id + depends_on = [ ] } resource "azurerm_key_vault_secret" "azure_function_secret" { diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index 535b443b..0467deaa 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer2.tfstate" - resource_group_name = "gft2" - storage_account_name = "gft2state" + resource_group_name = "gft3" + storage_account_name = "gft3state" } } diff --git a/solution/DeploymentV2/terraform_layer3/locals.tf b/solution/DeploymentV2/terraform_layer3/locals.tf index 88294952..01e9b844 100644 --- a/solution/DeploymentV2/terraform_layer3/locals.tf +++ b/solution/DeploymentV2/terraform_layer3/locals.tf @@ -5,11 +5,11 @@ locals { functionapp_url = "https://${local.functionapp_name}.azurewebsites.net" aad_webapp_name = (var.aad_webapp_name != "" ? 
var.aad_webapp_name : "ADS GoFast Web Portal (${var.environment_tag})") aad_functionapp_name = (var.aad_functionapp_name != "" ? var.aad_functionapp_name : "ADS GoFast Orchestration App (${var.environment_tag})") - purview_name = (var.purview_name != "" ? var.purview_name : "${var.prefix}${var.environment_tag}pur${var.app_name}${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") - purview_account_plink = (var.purview_name != "" ? var.purview_name : "${var.prefix}-${var.environment_tag}-pura-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") - purview_portal_plink = (var.purview_name != "" ? var.purview_name : "${var.prefix}-${var.environment_tag}-purp-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_name = data.terraform_remote_state.layer2.outputs.purview_name + purview_account_plink = (data.terraform_remote_state.layer2.outputs.purview_name != "" ? data.terraform_remote_state.layer2.outputs.purview_name : "${var.prefix}-${var.environment_tag}-pura-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_portal_plink = (data.terraform_remote_state.layer2.outputs.purview_name != "" ? data.terraform_remote_state.layer2.outputs.purview_name : "${var.prefix}-${var.environment_tag}-purp-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") purview_resource_group_name = "managed-${module.naming.resource_group.name_unique}-purview" - purview_ir_app_reg_name = (var.purview_ir_app_reg_name != "" ? 
var.purview_ir_app_reg_name : "ADS GoFast Purview Integration Runtime (${var.environment_tag})") + purview_ir_app_reg_name = data.terraform_remote_state.layer2.outputs.purview_sp_name tags = { Environment = var.environment_tag diff --git a/solution/DeploymentV2/terraform_layer3/purview.tf b/solution/DeploymentV2/terraform_layer3/purview.tf index cff9c1e8..ef29edd1 100644 --- a/solution/DeploymentV2/terraform_layer3/purview.tf +++ b/solution/DeploymentV2/terraform_layer3/purview.tf @@ -1,4 +1,4 @@ resource "azuread_application_password" "purview_ir" { count = var.deploy_purview && var.is_vnet_isolated ? 1 : 0 - application_object_id = azuread_application.purview_ir[0].object_id -} + application_object_id = data.terraform_remote_state.layer2.outputs.purview_sp_object_id +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer3/vars.tf b/solution/DeploymentV2/terraform_layer3/vars.tf index 954dcfa5..446bd5e8 100644 --- a/solution/DeploymentV2/terraform_layer3/vars.tf +++ b/solution/DeploymentV2/terraform_layer3/vars.tf @@ -90,8 +90,6 @@ variable "aad_functionapp_name" { type = string } - - #--------------------------------------------------------------- # Feature Toggles #--------------------------------------------------------------- @@ -120,6 +118,18 @@ variable "deploy_azure_ad_function_app_registration" { type = bool } +variable "deploy_purview" { + description = "Feature toggle for deploying Azure Purview" + default = false + type = bool +} + + +variable "is_vnet_isolated" { + description = "Whether to deploy the resources as vnet attached / private linked" + default = true + type = bool +} variable "resource_owners" { description = "A web app Azure security group used for admin access." 
diff --git a/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl index 234d6bcf..b5152d8d 100644 --- a/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl @@ -52,4 +52,5 @@ inputs = { ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. deploy_web_app = true deploy_function_app = true + deploy_purview = false } From 72ebbc47a5d4ba26890c6003dbf67952a3ef8575 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 28 Jul 2022 08:14:15 +0800 Subject: [PATCH 054/151] Separated Purview AAD Registrations into Layers --- .github/workflows/continuous-delivery.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index a75c3f48..61e52fdb 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,7 +3,7 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: main + branches: feature-1.0.4 jobs: deploy-to-env-one: From 97b6ed966bad055b0393e33cd10b2a8f800a1f65 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 28 Jul 2022 14:00:15 +0800 Subject: [PATCH 055/151] Removed old Deployment Dir, Moved Generated Environment artefacts to bin --- .github/workflows/continuous-delivery.yml | 2 +- .../Deployment/.devcontainer/.gitattributes | 2 - solution/Deployment/.devcontainer/Dockerfile | 23 - .../.devcontainer/devcontainer.json | 35 - .../library-scripts/azcli-debian.sh | 67 - .../library-scripts/common-debian.sh | 478 ---- solution/Deployment/arm/AppService_Func.bicep | 28 - solution/Deployment/arm/AppService_Func.json | 49 - solution/Deployment/arm/AppService_Web.bicep | 27 - solution/Deployment/arm/AppService_Web.json | 48 - .../Deployment/arm/ApplicationInsights.bicep 
| 288 --- .../Deployment/arm/ApplicationInsights.json | 345 --- solution/Deployment/arm/AzureSQLServer.json | 172 -- solution/Deployment/arm/AzureSqlServer.bicep | 120 - solution/Deployment/arm/DataFactory.bicep | 14 - solution/Deployment/arm/DataFactory.json | 31 - solution/Deployment/arm/FunctionApp.bicep | 48 - solution/Deployment/arm/FunctionApp.json | 84 - solution/Deployment/arm/KeyVault.bicep | 28 - solution/Deployment/arm/KeyVault.json | 50 - solution/Deployment/arm/LogAnalytics.bicep | 36 - solution/Deployment/arm/LogAnalytics.json | 69 - solution/Deployment/arm/Networking.bicep | 107 - solution/Deployment/arm/Networking.json | 210 -- solution/Deployment/arm/Storage_ADLS.bicep | 53 - solution/Deployment/arm/Storage_ADLS.json | 89 - solution/Deployment/arm/Storage_Blob.bicep | 53 - solution/Deployment/arm/Storage_Blob.json | 89 - solution/Deployment/arm/Storage_Logging.bicep | 21 - solution/Deployment/arm/Storage_Logging.json | 46 - solution/Deployment/arm/VirtualMachine.bicep | 254 -- solution/Deployment/arm/VirtualMachine.json | 342 --- solution/Deployment/arm/WebApp.bicep | 42 - solution/Deployment/arm/WebApp.json | 76 - .../Deployment/environments/EditSettings.html | 67 - .../environments/Node/package-lock.json | 872 ------- .../Deployment/environments/Node/package.json | 16 - .../Deployment/environments/Node/server.js | 37 - .../Deployment/environments/development.json | 197 -- .../environments/environment.schema.json | 2076 ----------------- solution/Deployment/environments/test.ps1 | 25 - ...a_CreateServicePrincipals_AAD_Elevated.ps1 | 82 - .../workflows/CD_1a_DeployServices.ps1 | 56 - .../CD_2a_CreateMSIs_AAD_Elevated.ps1 | 178 -- .../workflows/CD_2b_ConfigureServices.ps1 | 35 - .../Deployment/workflows/CI_1a_BuildCode.ps1 | 18 - .../workflows/Cleanup_RemoveAll.ps1 | 17 - .../workflows/Functions/Helpers.psm1 | 32 - .../LocalDevOnly_EnvironmentSetUp.ps1 | 16 - .../LocalDevOnly_InstallOnPremSHIR.ps1 | 8 - .../workflows/Steps/CD_ConfigureADF.ps1 | 277 
--- .../Steps/CD_ConfigureAzureSQLServer.ps1 | 192 -- ...ureSqlServer_UpdateTaskTypeMappingJson.ps1 | 31 - .../Steps/CD_ConfigureFunctionApp.ps1 | 50 - .../workflows/Steps/CD_ConfigureKeyVault.ps1 | 28 - .../Steps/CD_ConfigureSampleData.ps1 | 10 - .../workflows/Steps/CD_ConfigureVnet.ps1 | 203 -- .../workflows/Steps/CD_ConfigureWebApp.ps1 | 47 - .../workflows/Steps/CD_DeployADF.ps1 | 10 - .../Steps/CD_DeployADFOnPremSHIR.ps1 | 60 - .../workflows/Steps/CD_DeployAppInsights.ps1 | 12 - .../workflows/Steps/CD_DeployAppService.ps1 | 27 - .../Steps/CD_DeployAzureSqlServer.ps1 | 45 - .../workflows/Steps/CD_DeployFunctionApp.ps1 | 27 - .../workflows/Steps/CD_DeployKeyVault.ps1 | 10 - .../workflows/Steps/CD_DeployLogAnalytics.ps1 | 10 - .../Steps/CD_DeployResourceGroup.ps1 | 14 - .../workflows/Steps/CD_DeployStorageADLS.ps1 | 23 - .../workflows/Steps/CD_DeployStorageBlob.ps1 | 11 - .../Steps/CD_DeployStorageForLogging.ps1 | 12 - .../workflows/Steps/CD_DeployVnet.ps1 | 25 - .../workflows/Steps/CD_DeployWebSite.ps1 | 22 - .../workflows/Steps/CD_GrantRBAC.ps1 | 58 - .../workflows/Steps/CD_GrantWebAppAccess.ps1 | 21 - .../Steps/CD_SetResourceGroupHash.ps1 | 37 - .../Steps/CI_BuildAdsGoFastDatabase.ps1 | 14 - .../workflows/Steps/CI_BuildDataFactory.ps1 | 22 - .../workflows/Steps/CI_BuildFunctionApp.ps1 | 14 - .../workflows/Steps/CI_BuildWebApp.ps1 | 14 - .../Steps/InstallGatewayFunctions.ps1 | 107 - .../Steps/PushEnvFileIntoVariables.ps1 | 100 - solution/DeploymentV2/Cleanup_RemoveAll.ps1 | 13 +- .../vars/PreprocessEnvironment.ps1 | 49 +- .../environments/vars/admz/common_vars.yaml | 10 - .../vars/common_vars_template.jsonnet | 172 +- .../environments/vars/local/common_vars.yaml | 10 - .../vars/production/common_vars.yaml | 10 - .../vars/staging/GetSecretsTemplate.env | 30 - .../vars/staging/common_vars.json | 106 - .../vars/staging/common_vars_for_hcl.json | 21 - .../vars/staging/common_vars_values.jsonc | 39 +- .../vars/local/terragrunt.hcl | 2 +- 
.../vars/production/terragrunt.hcl | 2 +- .../vars/staging/terragrunt.hcl | 2 +- .../DeploymentV2/terraform_layer2/layer1.tf | 4 +- .../vars/local/terragrunt.hcl | 2 +- .../vars/production/terragrunt.hcl | 2 +- .../vars/staging/terragrunt.hcl | 2 +- .../DeploymentV2/terraform_layer3/layer2.tf | 4 +- .../vars/local/terragrunt.hcl | 2 +- .../vars/production/terragrunt.hcl | 2 +- .../vars/staging/terragrunt.hcl | 2 +- 102 files changed, 225 insertions(+), 8952 deletions(-) delete mode 100644 solution/Deployment/.devcontainer/.gitattributes delete mode 100644 solution/Deployment/.devcontainer/Dockerfile delete mode 100644 solution/Deployment/.devcontainer/devcontainer.json delete mode 100644 solution/Deployment/.devcontainer/library-scripts/azcli-debian.sh delete mode 100644 solution/Deployment/.devcontainer/library-scripts/common-debian.sh delete mode 100644 solution/Deployment/arm/AppService_Func.bicep delete mode 100644 solution/Deployment/arm/AppService_Func.json delete mode 100644 solution/Deployment/arm/AppService_Web.bicep delete mode 100644 solution/Deployment/arm/AppService_Web.json delete mode 100644 solution/Deployment/arm/ApplicationInsights.bicep delete mode 100644 solution/Deployment/arm/ApplicationInsights.json delete mode 100644 solution/Deployment/arm/AzureSQLServer.json delete mode 100644 solution/Deployment/arm/AzureSqlServer.bicep delete mode 100644 solution/Deployment/arm/DataFactory.bicep delete mode 100644 solution/Deployment/arm/DataFactory.json delete mode 100644 solution/Deployment/arm/FunctionApp.bicep delete mode 100644 solution/Deployment/arm/FunctionApp.json delete mode 100644 solution/Deployment/arm/KeyVault.bicep delete mode 100644 solution/Deployment/arm/KeyVault.json delete mode 100644 solution/Deployment/arm/LogAnalytics.bicep delete mode 100644 solution/Deployment/arm/LogAnalytics.json delete mode 100644 solution/Deployment/arm/Networking.bicep delete mode 100644 solution/Deployment/arm/Networking.json delete mode 100644 
solution/Deployment/arm/Storage_ADLS.bicep delete mode 100644 solution/Deployment/arm/Storage_ADLS.json delete mode 100644 solution/Deployment/arm/Storage_Blob.bicep delete mode 100644 solution/Deployment/arm/Storage_Blob.json delete mode 100644 solution/Deployment/arm/Storage_Logging.bicep delete mode 100644 solution/Deployment/arm/Storage_Logging.json delete mode 100644 solution/Deployment/arm/VirtualMachine.bicep delete mode 100644 solution/Deployment/arm/VirtualMachine.json delete mode 100644 solution/Deployment/arm/WebApp.bicep delete mode 100644 solution/Deployment/arm/WebApp.json delete mode 100644 solution/Deployment/environments/EditSettings.html delete mode 100644 solution/Deployment/environments/Node/package-lock.json delete mode 100644 solution/Deployment/environments/Node/package.json delete mode 100644 solution/Deployment/environments/Node/server.js delete mode 100644 solution/Deployment/environments/development.json delete mode 100644 solution/Deployment/environments/environment.schema.json delete mode 100644 solution/Deployment/environments/test.ps1 delete mode 100644 solution/Deployment/workflows/CD_0a_CreateServicePrincipals_AAD_Elevated.ps1 delete mode 100644 solution/Deployment/workflows/CD_1a_DeployServices.ps1 delete mode 100644 solution/Deployment/workflows/CD_2a_CreateMSIs_AAD_Elevated.ps1 delete mode 100644 solution/Deployment/workflows/CD_2b_ConfigureServices.ps1 delete mode 100644 solution/Deployment/workflows/CI_1a_BuildCode.ps1 delete mode 100644 solution/Deployment/workflows/Cleanup_RemoveAll.ps1 delete mode 100644 solution/Deployment/workflows/Functions/Helpers.psm1 delete mode 100644 solution/Deployment/workflows/LocalDevOnly_EnvironmentSetUp.ps1 delete mode 100644 solution/Deployment/workflows/LocalDevOnly_InstallOnPremSHIR.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_ConfigureADF.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_ConfigureAzureSQLServer.ps1 delete mode 100644 
solution/Deployment/workflows/Steps/CD_ConfigureAzureSqlServer_UpdateTaskTypeMappingJson.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_ConfigureFunctionApp.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_ConfigureKeyVault.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_ConfigureSampleData.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_ConfigureVnet.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_ConfigureWebApp.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployADF.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployADFOnPremSHIR.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployAppInsights.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployAppService.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployAzureSqlServer.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployFunctionApp.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployKeyVault.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployLogAnalytics.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployResourceGroup.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployStorageADLS.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployStorageBlob.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployStorageForLogging.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployVnet.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_DeployWebSite.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_GrantRBAC.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_GrantWebAppAccess.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CD_SetResourceGroupHash.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CI_BuildAdsGoFastDatabase.ps1 delete mode 100644 
solution/Deployment/workflows/Steps/CI_BuildDataFactory.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CI_BuildFunctionApp.ps1 delete mode 100644 solution/Deployment/workflows/Steps/CI_BuildWebApp.ps1 delete mode 100644 solution/Deployment/workflows/Steps/InstallGatewayFunctions.ps1 delete mode 100644 solution/Deployment/workflows/Steps/PushEnvFileIntoVariables.ps1 delete mode 100644 solution/DeploymentV2/environments/vars/admz/common_vars.yaml delete mode 100644 solution/DeploymentV2/environments/vars/local/common_vars.yaml delete mode 100644 solution/DeploymentV2/environments/vars/production/common_vars.yaml delete mode 100644 solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env delete mode 100644 solution/DeploymentV2/environments/vars/staging/common_vars.json delete mode 100644 solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 61e52fdb..a75c3f48 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,7 +3,7 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: feature-1.0.4 + branches: main jobs: deploy-to-env-one: diff --git a/solution/Deployment/.devcontainer/.gitattributes b/solution/Deployment/.devcontainer/.gitattributes deleted file mode 100644 index 2a365b59..00000000 --- a/solution/Deployment/.devcontainer/.gitattributes +++ /dev/null @@ -1,2 +0,0 @@ -# Force line endings on bas scripts -*.sh text eol=lf \ No newline at end of file diff --git a/solution/Deployment/.devcontainer/Dockerfile b/solution/Deployment/.devcontainer/Dockerfile deleted file mode 100644 index 50c06824..00000000 --- a/solution/Deployment/.devcontainer/Dockerfile +++ /dev/null @@ -1,23 +0,0 @@ -# You can pick any Debian/Ubuntu-based image. 
😊 -FROM mcr.microsoft.com/vscode/devcontainers/base:0-buster - -# [Option] Install zsh -ARG INSTALL_ZSH="true" -# [Option] Upgrade OS packages to their latest versions -ARG UPGRADE_PACKAGES="false" - -# Install needed packages and setup non-root user. Use a separate RUN statement to add your own dependencies. -ARG USERNAME=vscode -ARG USER_UID=1000 -ARG USER_GID=$USER_UID -COPY library-scripts/*.sh /tmp/library-scripts/ -RUN bash /tmp/library-scripts/common-debian.sh "${INSTALL_ZSH}" "${USERNAME}" "${USER_UID}" "${USER_GID}" "${UPGRADE_PACKAGES}" "true" "true" \ - # Install the Azure CLI - && bash /tmp/library-scripts/azcli-debian.sh \ - # Clean up - && apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts - -# [Optional] Uncomment this section to install additional OS packages. -# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ -# && apt-get -y install --no-install-recommends - diff --git a/solution/Deployment/.devcontainer/devcontainer.json b/solution/Deployment/.devcontainer/devcontainer.json deleted file mode 100644 index e3883a67..00000000 --- a/solution/Deployment/.devcontainer/devcontainer.json +++ /dev/null @@ -1,35 +0,0 @@ -// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: -// https://github.com/microsoft/vscode-dev-containers/tree/v0.191.1/containers/azure-cli -{ - "name": "Azure CLI", - "dockerFile": "Dockerfile", - - // Set *default* container specific settings.json values on container create. - "settings": { - "terminal.integrated.defaultProfile.linux": "pwsh" - //"terminal.integrated.cwd": "/home/vscode" - }, - - // Instead of mounting the local workspace folder (Deployment) by default, - // this should mount its parent folder (per "..") into the default /workspace folder - // of the container. 
This assumes that the Deployment folder was opened in vscode - "workspaceMount": "source=${localWorkspaceFolder}/..,target=/workspace,type=bind", - "workspaceFolder": "/workspace", - - // Add the IDs of extensions you want installed when the container is created. - "extensions": [ - "ms-vscode.azurecli" - ], - - // Use 'forwardPorts' to make a list of ports inside the container available locally. - // "forwardPorts": [], - - // Use 'postCreateCommand' to run commands after the container is created. - "postCreateCommand": "sudo apt-get update && sudo apt-get install -y wget apt-transport-https software-properties-common && wget -q https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb && sudo dpkg -i packages-microsoft-prod.deb && sudo apt-get update && sudo apt-get install -y powershell && rm ./packages-microsoft-prod.deb && sudo apt install -y dotnet-sdk-3.1 && curl -fsSL https://deb.nodesource.com/setup_17.x | sudo -E bash - && apt-get install -y nodejs", - - // Uncomment when using a ptrace-based debugger like C++, Go, and Rust - // "runArgs": [ "--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined" ], - - // Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. - //"remoteUser": "vscode" -} \ No newline at end of file diff --git a/solution/Deployment/.devcontainer/library-scripts/azcli-debian.sh b/solution/Deployment/.devcontainer/library-scripts/azcli-debian.sh deleted file mode 100644 index 34cbb35c..00000000 --- a/solution/Deployment/.devcontainer/library-scripts/azcli-debian.sh +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env bash -#------------------------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
-#------------------------------------------------------------------------------------------------------------- -# -# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/azcli.md -# Maintainer: The VS Code and Codespaces Teams -# -# Syntax: ./azcli-debian.sh - -set -e - -MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc" - -if [ "$(id -u)" -ne 0 ]; then - echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' - exit 1 -fi - -# Get central common setting -get_common_setting() { - if [ "${common_settings_file_loaded}" != "true" ]; then - curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping." - common_settings_file_loaded=true - fi - if [ -f "/tmp/vsdc-settings.env" ]; then - local multi_line="" - if [ "$2" = "true" ]; then multi_line="-z"; fi - local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')" - if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi - fi - echo "$1=${!1}" -} - -# Function to run apt-get if needed -apt_get_update_if_needed() -{ - if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then - echo "Running apt-get update..." - apt-get update - else - echo "Skipping apt-get update." - fi -} - -# Checks if packages are installed and installs them if not -check_packages() { - if ! dpkg -s "$@" > /dev/null 2>&1; then - apt_get_update_if_needed - apt-get -y install --no-install-recommends "$@" - fi -} - -export DEBIAN_FRONTEND=noninteractive - -# Install dependencies -check_packages apt-transport-https curl ca-certificates lsb-release gnupg2 - -# Import key safely (new 'signed-by' method rather than deprecated apt-key approach) and install -. 
/etc/os-release -get_common_setting MICROSOFT_GPG_KEYS_URI -curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg -echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/azure-cli/ ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/azure-cli.list -apt-get update -apt-get install -y azure-cli -echo "Done!" \ No newline at end of file diff --git a/solution/Deployment/.devcontainer/library-scripts/common-debian.sh b/solution/Deployment/.devcontainer/library-scripts/common-debian.sh deleted file mode 100644 index 283b57ee..00000000 --- a/solution/Deployment/.devcontainer/library-scripts/common-debian.sh +++ /dev/null @@ -1,478 +0,0 @@ -#!/usr/bin/env bash -#------------------------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. -#------------------------------------------------------------------------------------------------------------- -# -# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md -# Maintainer: The VS Code and Codespaces Teams -# -# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages] - -set -e - -INSTALL_ZSH=${1:-"true"} -USERNAME=${2:-"automatic"} -USER_UID=${3:-"automatic"} -USER_GID=${4:-"automatic"} -UPGRADE_PACKAGES=${5:-"true"} -INSTALL_OH_MYS=${6:-"true"} -ADD_NON_FREE_PACKAGES=${7:-"false"} -SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)" -MARKER_FILE="/usr/local/etc/vscode-dev-containers/common" - - -if [ "$(id -u)" -ne 0 ]; then - echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' 
- exit 1 -fi - -# Ensure that login shells get the correct path if the user updated the PATH using ENV. -rm -f /etc/profile.d/00-restore-env.sh -echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh -chmod +x /etc/profile.d/00-restore-env.sh - -# If in automatic mode, determine if a user already exists, if not use vscode -if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then - USERNAME="" - POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") - for CURRENT_USER in ${POSSIBLE_USERS[@]}; do - if id -u ${CURRENT_USER} > /dev/null 2>&1; then - USERNAME=${CURRENT_USER} - break - fi - done - if [ "${USERNAME}" = "" ]; then - USERNAME=vscode - fi -elif [ "${USERNAME}" = "none" ]; then - USERNAME=root - USER_UID=0 - USER_GID=0 -fi - -# Load markers to see which steps have already run -if [ -f "${MARKER_FILE}" ]; then - echo "Marker file found:" - cat "${MARKER_FILE}" - source "${MARKER_FILE}" -fi - -# Ensure apt is in non-interactive to avoid prompts -export DEBIAN_FRONTEND=noninteractive - -# Function to call apt-get if needed -apt_get_update_if_needed() -{ - if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then - echo "Running apt-get update..." - apt-get update - else - echo "Skipping apt-get update." 
- fi -} - -# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies -if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then - - package_list="apt-utils \ - openssh-client \ - gnupg2 \ - iproute2 \ - procps \ - lsof \ - htop \ - net-tools \ - psmisc \ - curl \ - wget \ - rsync \ - ca-certificates \ - unzip \ - zip \ - nano \ - vim-tiny \ - less \ - jq \ - lsb-release \ - apt-transport-https \ - dialog \ - libc6 \ - libgcc1 \ - libkrb5-3 \ - libgssapi-krb5-2 \ - libicu[0-9][0-9] \ - liblttng-ust0 \ - libstdc++6 \ - zlib1g \ - locales \ - sudo \ - ncdu \ - man-db \ - strace \ - manpages \ - manpages-dev \ - init-system-helpers" - - # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian - if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then - # Bring in variables from /etc/os-release like VERSION_CODENAME - . /etc/os-release - sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list - sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src 
http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list - sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list - echo "Running apt-get update..." - apt-get update - package_list="${package_list} manpages-posix manpages-posix-dev" - else - apt_get_update_if_needed - fi - - # Install libssl1.1 if available - if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then - package_list="${package_list} libssl1.1" - fi - - # Install appropriate version of libssl1.0.x if available - libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '') - if [ "$(echo "$LIlibssl_packageBSSL" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then - if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then - # Debian 9 - package_list="${package_list} libssl1.0.2" - elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then - # Ubuntu 18.04, 16.04, earlier - package_list="${package_list} libssl1.0.0" - fi - fi - - echo "Packages to verify are installed: ${package_list}" - apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 ) - - # Install git if not already installed (may be more recent than distro version) - if ! 
type git > /dev/null 2>&1; then - apt-get -y install --no-install-recommends git - fi - - PACKAGES_ALREADY_INSTALLED="true" -fi - -# Get to latest versions of all packages -if [ "${UPGRADE_PACKAGES}" = "true" ]; then - apt_get_update_if_needed - apt-get -y upgrade --no-install-recommends - apt-get autoremove -y -fi - -# Ensure at least the en_US.UTF-8 UTF-8 locale is available. -# Common need for both applications and things like the agnoster ZSH theme. -if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then - echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen - locale-gen - LOCALE_ALREADY_SET="true" -fi - -# Create or update a non-root user to match UID/GID. -if id -u ${USERNAME} > /dev/null 2>&1; then - # User exists, update if needed - if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -G $USERNAME)" ]; then - groupmod --gid $USER_GID $USERNAME - usermod --gid $USER_GID $USERNAME - fi - if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then - usermod --uid $USER_UID $USERNAME - fi -else - # Create user - if [ "${USER_GID}" = "automatic" ]; then - groupadd $USERNAME - else - groupadd --gid $USER_GID $USERNAME - fi - if [ "${USER_UID}" = "automatic" ]; then - useradd -s /bin/bash --gid $USERNAME -m $USERNAME - else - useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME - fi -fi - -# Add add sudo support for non-root user -if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then - echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME - chmod 0440 /etc/sudoers.d/$USERNAME - EXISTING_NON_ROOT_USER="${USERNAME}" -fi - -# ** Shell customization section ** -if [ "${USERNAME}" = "root" ]; then - user_rc_path="/root" -else - user_rc_path="/home/${USERNAME}" -fi - -# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty -if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! 
-s "${user_rc_path}/.bashrc" ] ; then - cp /etc/skel/.bashrc "${user_rc_path}/.bashrc" -fi - -# Restore user .profile defaults from skeleton file if it doesn't exist or is empty -if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then - cp /etc/skel/.profile "${user_rc_path}/.profile" -fi - -# .bashrc/.zshrc snippet -rc_snippet="$(cat << 'EOF' - -if [ -z "${USER}" ]; then export USER=$(whoami); fi -if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi - -# Display optional first run image specific notice if configured and terminal is interactive -if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then - if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then - cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" - elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then - cat "/workspaces/.codespaces/shared/first-run-notice.txt" - fi - mkdir -p "$HOME/.config/vscode-dev-containers" - # Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it - ((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &) -fi - -# Set the default git editor if not already set -if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then - if [ "${TERM_PROGRAM}" = "vscode" ]; then - if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then - export GIT_EDITOR="code-insiders --wait" - else - export GIT_EDITOR="code --wait" - fi - fi -fi - -EOF -)" - -# code shim, it fallbacks to code-insiders if code is not available -cat << 'EOF' > /usr/local/bin/code -#!/bin/sh - -get_in_path_except_current() { - which -a "$1" | grep -A1 "$0" | grep -v "$0" -} - -code="$(get_in_path_except_current code)" - -if [ -n "$code" ]; then - exec "$code" "$@" -elif [ 
"$(command -v code-insiders)" ]; then - exec code-insiders "$@" -else - echo "code or code-insiders is not installed" >&2 - exit 127 -fi -EOF -chmod +x /usr/local/bin/code - -# systemctl shim - tells people to use 'service' if systemd is not running -cat << 'EOF' > /usr/local/bin/systemctl -#!/bin/sh -set -e -if [ -d "/run/systemd/system" ]; then - exec /bin/systemctl/systemctl "$@" -else - echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services intead. e.g.: \n\nservice --status-all' -fi -EOF -chmod +x /usr/local/bin/systemctl - -# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme -codespaces_bash="$(cat \ -<<'EOF' - -# Codespaces bash prompt theme -__bash_prompt() { - local userpart='`export XIT=$? \ - && [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \ - && [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`' - local gitbranch='`\ - export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \ - if [ "${BRANCH}" != "" ]; then \ - echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \ - && if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \ - echo -n " \[\033[1;33m\]✗"; \ - fi \ - && echo -n "\[\033[0;36m\]) "; \ - fi`' - local lightblue='\[\033[1;34m\]' - local removecolor='\[\033[0m\]' - PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ " - unset -f __bash_prompt -} -__bash_prompt - -EOF -)" - -codespaces_zsh="$(cat \ -<<'EOF' -# Codespaces zsh prompt theme -__zsh_prompt() { - local prompt_username - if [ ! 
-z "${GITHUB_USER}" ]; then - prompt_username="@${GITHUB_USER}" - else - prompt_username="%n" - fi - PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow - PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd - PROMPT+='$(git_prompt_info)%{$fg[white]%}$ %{$reset_color%}' # Git status - unset -f __zsh_prompt -} -ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}" -ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} " -ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})" -ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})" -__zsh_prompt - -EOF -)" - -# Add notice that Oh My Bash! has been removed from images and how to provide information on how to install manually -omb_readme="$(cat \ -<<'EOF' -"Oh My Bash!" has been removed from this image in favor of a simple shell prompt. If you -still wish to use it, remove "~/.oh-my-bash" and install it from: https://github.com/ohmybash/oh-my-bash -You may also want to consider "Bash-it" as an alternative: https://github.com/bash-it/bash-it -See here for infomation on adding it to your image or dotfiles: https://aka.ms/codespaces/omb-remove -EOF -)" -omb_stub="$(cat \ -<<'EOF' -#!/usr/bin/env bash -if [ -t 1 ]; then - cat $HOME/.oh-my-bash/README.md -fi -EOF -)" - -# Add RC snippet and custom bash prompt -if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then - echo "${rc_snippet}" >> /etc/bash.bashrc - echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc" - echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc" - if [ "${USERNAME}" != "root" ]; then - echo "${codespaces_bash}" >> "/root/.bashrc" - echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc" - fi - chown ${USERNAME}:${USERNAME} "${user_rc_path}/.bashrc" - RC_SNIPPET_ALREADY_ADDED="true" -fi - -# Add stub for Oh My Bash! -if [ ! 
-d "${user_rc_path}/.oh-my-bash}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then - mkdir -p "${user_rc_path}/.oh-my-bash" "/root/.oh-my-bash" - echo "${omb_readme}" >> "${user_rc_path}/.oh-my-bash/README.md" - echo "${omb_stub}" >> "${user_rc_path}/.oh-my-bash/oh-my-bash.sh" - chmod +x "${user_rc_path}/.oh-my-bash/oh-my-bash.sh" - if [ "${USERNAME}" != "root" ]; then - echo "${omb_readme}" >> "/root/.oh-my-bash/README.md" - echo "${omb_stub}" >> "/root/.oh-my-bash/oh-my-bash.sh" - chmod +x "/root/.oh-my-bash/oh-my-bash.sh" - fi - chown -R "${USERNAME}:${USERNAME}" "${user_rc_path}/.oh-my-bash" -fi - -# Optionally install and configure zsh and Oh My Zsh! -if [ "${INSTALL_ZSH}" = "true" ]; then - if ! type zsh > /dev/null 2>&1; then - apt_get_update_if_needed - apt-get install -y zsh - fi - if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then - echo "${rc_snippet}" >> /etc/zsh/zshrc - ZSH_ALREADY_INSTALLED="true" - fi - - # Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme. - # See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script. - oh_my_install_dir="${user_rc_path}/.oh-my-zsh" - if [ ! 
-d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then - template_path="${oh_my_install_dir}/templates/zshrc.zsh-template" - user_rc_file="${user_rc_path}/.zshrc" - umask g-w,o-w - mkdir -p ${oh_my_install_dir} - git clone --depth=1 \ - -c core.eol=lf \ - -c core.autocrlf=false \ - -c fsck.zeroPaddedFilemode=ignore \ - -c fetch.fsck.zeroPaddedFilemode=ignore \ - -c receive.fsck.zeroPaddedFilemode=ignore \ - "https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1 - echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file} - sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file} - - mkdir -p ${oh_my_install_dir}/custom/themes - echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme" - # Shrink git while still enabling updates - cd "${oh_my_install_dir}" - git repack -a -d -f --depth=1 --window=1 - # Copy to non-root user if one is specified - if [ "${USERNAME}" != "root" ]; then - cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root - chown -R ${USERNAME}:${USERNAME} "${user_rc_path}" - fi - fi -fi - -# Persist image metadata info, script if meta.env found in same directory -meta_info_script="$(cat << 'EOF' -#!/bin/sh -. /usr/local/etc/vscode-dev-containers/meta.env - -# Minimal output -if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then - echo "${VERSION}" - exit 0 -elif [ "$1" = "release" ]; then - echo "${GIT_REPOSITORY_RELEASE}" - exit 0 -elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then - echo "${CONTENTS_URL}" - exit 0 -fi - -#Full output -echo -echo "Development container image information" -echo -if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi -if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi -if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi -if [ ! 
-z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi -if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi -if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi -if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi -echo -EOF -)" -if [ -f "${SCRIPT_DIR}/meta.env" ]; then - mkdir -p /usr/local/etc/vscode-dev-containers/ - cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env - echo "${meta_info_script}" > /usr/local/bin/devcontainer-info - chmod +x /usr/local/bin/devcontainer-info -fi - -# Write marker file -mkdir -p "$(dirname "${MARKER_FILE}")" -echo -e "\ - PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\ - LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\ - EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\ - RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\ - ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}" - -echo "Done!" 
diff --git a/solution/Deployment/arm/AppService_Func.bicep b/solution/Deployment/arm/AppService_Func.bicep deleted file mode 100644 index c72f2506..00000000 --- a/solution/Deployment/arm/AppService_Func.bicep +++ /dev/null @@ -1,28 +0,0 @@ -@description('Location for all resources.') -param location string = resourceGroup().location - -@description('') -param asp_name string = 'test' - -resource asp_name_resource 'Microsoft.Web/serverfarms@2020-06-01' = { - name: asp_name - location: location - kind: 'functionapp' - properties: { - perSiteScaling: false - maximumElasticWorkerCount: 1 - isSpot: false - reserved: false - isXenon: false - hyperV: false - targetWorkerCount: 0 - targetWorkerSizeId: 0 - } - sku: { - name: 'Y1' - tier: 'Dynamic' - size: 'Y1' - family: 'Y' - capacity: 0 - } -} \ No newline at end of file diff --git a/solution/Deployment/arm/AppService_Func.json b/solution/Deployment/arm/AppService_Func.json deleted file mode 100644 index 644fc988..00000000 --- a/solution/Deployment/arm/AppService_Func.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "[resourceGroup().location]", - "metadata": { - "description": "Location for all resources." 
- } - }, - "asp_name": { - "type": "string", - "defaultValue": "test", - "metadata": { - "description": "" - } - } - }, - "resources": [ - { - "type": "Microsoft.Web/serverfarms", - "apiVersion": "2020-06-01", - "name": "[parameters('asp_name')]", - "location": "[parameters('location')]", - "kind": "functionapp", - "properties": { - "perSiteScaling": false, - "maximumElasticWorkerCount": 1, - "isSpot": false, - "reserved": false, - "isXenon": false, - "hyperV": false, - "targetWorkerCount": 0, - "targetWorkerSizeId": 0 - }, - "sku": { - "name": "Y1", - "tier": "Dynamic", - "size": "Y1", - "family": "Y", - "capacity": 0 - } - } - - ], - "outputs": { - } -} diff --git a/solution/Deployment/arm/AppService_Web.bicep b/solution/Deployment/arm/AppService_Web.bicep deleted file mode 100644 index e3db98c0..00000000 --- a/solution/Deployment/arm/AppService_Web.bicep +++ /dev/null @@ -1,27 +0,0 @@ -@description('Location for all resources.') -param location string = resourceGroup().location - -@description('') -param asp_name string = 'test' - -resource asp_name_resource 'Microsoft.Web/serverfarms@2018-02-01' = { - name: asp_name - location: location - properties: { - perSiteScaling: false - maximumElasticWorkerCount: 1 - isSpot: false - reserved: false - isXenon: false - hyperV: false - targetWorkerCount: 0 - targetWorkerSizeId: 0 - } - sku: { - name: 'S1' - tier: 'Standard' - size: 'S1' - family: 'S' - capacity: 1 - } -} \ No newline at end of file diff --git a/solution/Deployment/arm/AppService_Web.json b/solution/Deployment/arm/AppService_Web.json deleted file mode 100644 index f1ca3646..00000000 --- a/solution/Deployment/arm/AppService_Web.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "[resourceGroup().location]", - "metadata": { - "description": "Location for all resources." 
- } - }, - "asp_name": { - "type": "string", - "defaultValue": "test", - "metadata": { - "description": "" - } - } - }, - "resources": [ - { - "type": "Microsoft.Web/serverfarms", - "apiVersion": "2018-02-01", - "name": "[parameters('asp_name')]", - "location": "[parameters('location')]", - "properties": { - "perSiteScaling": false, - "maximumElasticWorkerCount": 1, - "isSpot": false, - "reserved": false, - "isXenon": false, - "hyperV": false, - "targetWorkerCount": 0, - "targetWorkerSizeId": 0 - }, - "sku": { - "name": "S1", - "tier": "Standard", - "size": "S1", - "family": "S", - "capacity": 1 - } - } - - ], - "outputs": { - } -} diff --git a/solution/Deployment/arm/ApplicationInsights.bicep b/solution/Deployment/arm/ApplicationInsights.bicep deleted file mode 100644 index 7f7667de..00000000 --- a/solution/Deployment/arm/ApplicationInsights.bicep +++ /dev/null @@ -1,288 +0,0 @@ -param appinsights_name string = 'appinsights-adsgofastyckrmqteklajm' -param location string = 'australiaeast' - -resource appinsights_name_resource 'microsoft.insights/components@2020-02-02-preview' = { - name: appinsights_name - location: location - tags: {} - kind: 'web' - properties: { - Application_Type: 'web' - IngestionMode: 'ApplicationInsights' - publicNetworkAccessForIngestion: 'Enabled' - publicNetworkAccessForQuery: 'Enabled' - } -} - -resource appinsights_name_degradationindependencyduration 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'degradationindependencyduration' - location: location - properties: { - RuleDefinitions: { - Name: 'degradationindependencyduration' - DisplayName: 'Degradation in dependency duration' - Description: 'Smart Detection rules notify you of performance anomaly issues.' 
- HelpUrl: 'https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics' - IsHidden: false - IsEnabledByDefault: true - IsInPreview: false - SupportsEmailNotifications: true - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_degradationinserverresponsetime 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'degradationinserverresponsetime' - location: location - properties: { - RuleDefinitions: { - Name: 'degradationinserverresponsetime' - DisplayName: 'Degradation in server response time' - Description: 'Smart Detection rules notify you of performance anomaly issues.' - HelpUrl: 'https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics' - IsHidden: false - IsEnabledByDefault: true - IsInPreview: false - SupportsEmailNotifications: true - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_digestMailConfiguration 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'digestMailConfiguration' - location: location - properties: { - RuleDefinitions: { - Name: 'digestMailConfiguration' - DisplayName: 'Digest Mail Configuration' - Description: 'This rule describes the digest mail preferences' - HelpUrl: 'www.homail.com' - IsHidden: true - IsEnabledByDefault: true - IsInPreview: false - SupportsEmailNotifications: true - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_extension_billingdatavolumedailyspikeextension 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'extension_billingdatavolumedailyspikeextension' - location: location - properties: { - RuleDefinitions: { - Name: 
'extension_billingdatavolumedailyspikeextension' - DisplayName: 'Abnormal rise in daily data volume (preview)' - Description: 'This detection rule automatically analyzes the billing data generated by your application, and can warn you about an unusual increase in your application\'s billing costs' - HelpUrl: 'https://github.com/Microsoft/ApplicationInsights-Home/tree/master/SmartDetection/billing-data-volume-daily-spike.md' - IsHidden: false - IsEnabledByDefault: true - IsInPreview: true - SupportsEmailNotifications: false - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_extension_canaryextension 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'extension_canaryextension' - location: location - properties: { - RuleDefinitions: { - Name: 'extension_canaryextension' - DisplayName: 'Canary extension' - Description: 'Canary extension' - HelpUrl: 'https://github.com/Microsoft/ApplicationInsights-Home/blob/master/SmartDetection/' - IsHidden: true - IsEnabledByDefault: true - IsInPreview: true - SupportsEmailNotifications: false - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_extension_exceptionchangeextension 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'extension_exceptionchangeextension' - location: location - properties: { - RuleDefinitions: { - Name: 'extension_exceptionchangeextension' - DisplayName: 'Abnormal rise in exception volume (preview)' - Description: 'This detection rule automatically analyzes the exceptions thrown in your application, and can warn you about unusual patterns in your exception telemetry.' 
- HelpUrl: 'https://github.com/Microsoft/ApplicationInsights-Home/blob/master/SmartDetection/abnormal-rise-in-exception-volume.md' - IsHidden: false - IsEnabledByDefault: true - IsInPreview: true - SupportsEmailNotifications: false - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_extension_memoryleakextension 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'extension_memoryleakextension' - location: location - properties: { - RuleDefinitions: { - Name: 'extension_memoryleakextension' - DisplayName: 'Potential memory leak detected (preview)' - Description: 'This detection rule automatically analyzes the memory consumption of each process in your application, and can warn you about potential memory leaks or increased memory consumption.' - HelpUrl: 'https://github.com/Microsoft/ApplicationInsights-Home/tree/master/SmartDetection/memory-leak.md' - IsHidden: false - IsEnabledByDefault: true - IsInPreview: true - SupportsEmailNotifications: false - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_extension_securityextensionspackage 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'extension_securityextensionspackage' - location: location - properties: { - RuleDefinitions: { - Name: 'extension_securityextensionspackage' - DisplayName: 'Potential security issue detected (preview)' - Description: 'This detection rule automatically analyzes the telemetry generated by your application and detects potential security issues.' 
- HelpUrl: 'https://github.com/Microsoft/ApplicationInsights-Home/blob/master/SmartDetection/application-security-detection-pack.md' - IsHidden: false - IsEnabledByDefault: true - IsInPreview: true - SupportsEmailNotifications: false - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_extension_traceseveritydetector 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'extension_traceseveritydetector' - location: location - properties: { - RuleDefinitions: { - Name: 'extension_traceseveritydetector' - DisplayName: 'Degradation in trace severity ratio (preview)' - Description: 'This detection rule automatically analyzes the trace logs emitted from your application, and can warn you about unusual patterns in the severity of your trace telemetry.' - HelpUrl: 'https://github.com/Microsoft/ApplicationInsights-Home/blob/master/SmartDetection/degradation-in-trace-severity-ratio.md' - IsHidden: false - IsEnabledByDefault: true - IsInPreview: true - SupportsEmailNotifications: false - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_longdependencyduration 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'longdependencyduration' - location: location - properties: { - RuleDefinitions: { - Name: 'longdependencyduration' - DisplayName: 'Long dependency duration' - Description: 'Smart Detection rules notify you of performance anomaly issues.' 
- HelpUrl: 'https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics' - IsHidden: false - IsEnabledByDefault: true - IsInPreview: false - SupportsEmailNotifications: true - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_migrationToAlertRulesCompleted 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'migrationToAlertRulesCompleted' - location: location - properties: { - RuleDefinitions: { - Name: 'migrationToAlertRulesCompleted' - DisplayName: 'Migration To Alert Rules Completed' - Description: 'A configuration that controls the migration state of Smart Detection to Smart Alerts' - HelpUrl: 'https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics' - IsHidden: true - IsEnabledByDefault: false - IsInPreview: true - SupportsEmailNotifications: false - } - Enabled: false - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_slowpageloadtime 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'slowpageloadtime' - location: location - properties: { - RuleDefinitions: { - Name: 'slowpageloadtime' - DisplayName: 'Slow page load time' - Description: 'Smart Detection rules notify you of performance anomaly issues.' 
- HelpUrl: 'https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics' - IsHidden: false - IsEnabledByDefault: true - IsInPreview: false - SupportsEmailNotifications: true - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} - -resource appinsights_name_slowserverresponsetime 'microsoft.insights/components/ProactiveDetectionConfigs@2018-05-01-preview' = { - parent: appinsights_name_resource - name: 'slowserverresponsetime' - location: location - properties: { - RuleDefinitions: { - Name: 'slowserverresponsetime' - DisplayName: 'Slow server response time' - Description: 'Smart Detection rules notify you of performance anomaly issues.' - HelpUrl: 'https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics' - IsHidden: false - IsEnabledByDefault: true - IsInPreview: false - SupportsEmailNotifications: true - } - Enabled: true - SendEmailsToSubscriptionOwners: true - CustomEmails: [] - } -} \ No newline at end of file diff --git a/solution/Deployment/arm/ApplicationInsights.json b/solution/Deployment/arm/ApplicationInsights.json deleted file mode 100644 index d86d2c12..00000000 --- a/solution/Deployment/arm/ApplicationInsights.json +++ /dev/null @@ -1,345 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "appinsights_name": { - "defaultValue": "appinsights-adsgofastyckrmqteklajm", - "type": "String" - }, - "location": { - "defaultValue": "australiaeast", - "type": "String" - } - }, - "variables": {}, - "resources": [ - { - "type": "microsoft.insights/components", - "apiVersion": "2020-02-02-preview", - "name": "[parameters('appinsights_name')]", - "location": "[parameters('location')]", - "tags": { - - }, - "kind": "web", - "properties": { - "Application_Type": "web", - "IngestionMode": "ApplicationInsights", - 
"publicNetworkAccessForIngestion": "Enabled", - "publicNetworkAccessForQuery": "Enabled" - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/degradationindependencyduration')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "degradationindependencyduration", - "DisplayName": "Degradation in dependency duration", - "Description": "Smart Detection rules notify you of performance anomaly issues.", - "HelpUrl": "https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics", - "IsHidden": false, - "IsEnabledByDefault": true, - "IsInPreview": false, - "SupportsEmailNotifications": true - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/degradationinserverresponsetime')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "degradationinserverresponsetime", - "DisplayName": "Degradation in server response time", - "Description": "Smart Detection rules notify you of performance anomaly issues.", - "HelpUrl": "https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics", - "IsHidden": false, - "IsEnabledByDefault": true, - "IsInPreview": false, - "SupportsEmailNotifications": true - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": 
"2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/digestMailConfiguration')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "digestMailConfiguration", - "DisplayName": "Digest Mail Configuration", - "Description": "This rule describes the digest mail preferences", - "HelpUrl": "www.homail.com", - "IsHidden": true, - "IsEnabledByDefault": true, - "IsInPreview": false, - "SupportsEmailNotifications": true - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/extension_billingdatavolumedailyspikeextension')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "extension_billingdatavolumedailyspikeextension", - "DisplayName": "Abnormal rise in daily data volume (preview)", - "Description": "This detection rule automatically analyzes the billing data generated by your application, and can warn you about an unusual increase in your application's billing costs", - "HelpUrl": "https://github.com/Microsoft/ApplicationInsights-Home/tree/master/SmartDetection/billing-data-volume-daily-spike.md", - "IsHidden": false, - "IsEnabledByDefault": true, - "IsInPreview": true, - "SupportsEmailNotifications": false - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/extension_canaryextension')]", - "location": "[parameters('location')]", - "dependsOn": [ - 
"[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "extension_canaryextension", - "DisplayName": "Canary extension", - "Description": "Canary extension", - "HelpUrl": "https://github.com/Microsoft/ApplicationInsights-Home/blob/master/SmartDetection/", - "IsHidden": true, - "IsEnabledByDefault": true, - "IsInPreview": true, - "SupportsEmailNotifications": false - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/extension_exceptionchangeextension')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "extension_exceptionchangeextension", - "DisplayName": "Abnormal rise in exception volume (preview)", - "Description": "This detection rule automatically analyzes the exceptions thrown in your application, and can warn you about unusual patterns in your exception telemetry.", - "HelpUrl": "https://github.com/Microsoft/ApplicationInsights-Home/blob/master/SmartDetection/abnormal-rise-in-exception-volume.md", - "IsHidden": false, - "IsEnabledByDefault": true, - "IsInPreview": true, - "SupportsEmailNotifications": false - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/extension_memoryleakextension')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "extension_memoryleakextension", - 
"DisplayName": "Potential memory leak detected (preview)", - "Description": "This detection rule automatically analyzes the memory consumption of each process in your application, and can warn you about potential memory leaks or increased memory consumption.", - "HelpUrl": "https://github.com/Microsoft/ApplicationInsights-Home/tree/master/SmartDetection/memory-leak.md", - "IsHidden": false, - "IsEnabledByDefault": true, - "IsInPreview": true, - "SupportsEmailNotifications": false - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/extension_securityextensionspackage')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "extension_securityextensionspackage", - "DisplayName": "Potential security issue detected (preview)", - "Description": "This detection rule automatically analyzes the telemetry generated by your application and detects potential security issues.", - "HelpUrl": "https://github.com/Microsoft/ApplicationInsights-Home/blob/master/SmartDetection/application-security-detection-pack.md", - "IsHidden": false, - "IsEnabledByDefault": true, - "IsInPreview": true, - "SupportsEmailNotifications": false - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/extension_traceseveritydetector')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": 
"extension_traceseveritydetector", - "DisplayName": "Degradation in trace severity ratio (preview)", - "Description": "This detection rule automatically analyzes the trace logs emitted from your application, and can warn you about unusual patterns in the severity of your trace telemetry.", - "HelpUrl": "https://github.com/Microsoft/ApplicationInsights-Home/blob/master/SmartDetection/degradation-in-trace-severity-ratio.md", - "IsHidden": false, - "IsEnabledByDefault": true, - "IsInPreview": true, - "SupportsEmailNotifications": false - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/longdependencyduration')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "longdependencyduration", - "DisplayName": "Long dependency duration", - "Description": "Smart Detection rules notify you of performance anomaly issues.", - "HelpUrl": "https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics", - "IsHidden": false, - "IsEnabledByDefault": true, - "IsInPreview": false, - "SupportsEmailNotifications": true - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/migrationToAlertRulesCompleted')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "migrationToAlertRulesCompleted", - "DisplayName": "Migration To Alert Rules Completed", - 
"Description": "A configuration that controls the migration state of Smart Detection to Smart Alerts", - "HelpUrl": "https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics", - "IsHidden": true, - "IsEnabledByDefault": false, - "IsInPreview": true, - "SupportsEmailNotifications": false - }, - "enabled": false, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/slowpageloadtime')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "slowpageloadtime", - "DisplayName": "Slow page load time", - "Description": "Smart Detection rules notify you of performance anomaly issues.", - "HelpUrl": "https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics", - "IsHidden": false, - "IsEnabledByDefault": true, - "IsInPreview": false, - "SupportsEmailNotifications": true - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - }, - { - "type": "microsoft.insights/components/ProactiveDetectionConfigs", - "apiVersion": "2018-05-01-preview", - "name": "[concat(parameters('appinsights_name'), '/slowserverresponsetime')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('microsoft.insights/components', parameters('appinsights_name'))]" - ], - "properties": { - "ruleDefinitions": { - "Name": "slowserverresponsetime", - "DisplayName": "Slow server response time", - "Description": "Smart Detection rules notify you of performance anomaly issues.", - "HelpUrl": "https://docs.microsoft.com/en-us/azure/application-insights/app-insights-proactive-performance-diagnostics", - "IsHidden": false, - 
"IsEnabledByDefault": true, - "IsInPreview": false, - "SupportsEmailNotifications": true - }, - "enabled": true, - "sendEmailsToSubscriptionOwners": true, - "customEmails": [] - } - } - ] -} \ No newline at end of file diff --git a/solution/Deployment/arm/AzureSQLServer.json b/solution/Deployment/arm/AzureSQLServer.json deleted file mode 100644 index dfb72c8e..00000000 --- a/solution/Deployment/arm/AzureSQLServer.json +++ /dev/null @@ -1,172 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "metadata": { - "_generator": { - "name": "bicep", - "version": "0.4.613.9944", - "templateHash": "6917878293454276384" - } - }, - "parameters": { - "location": { - "type": "string", - "defaultValue": "[resourceGroup().location]", - "metadata": { - "description": "Location for all resources." - } - }, - "sql_server_name": { - "type": "string", - "defaultValue": "[format('adsgofast-srv-{0}', uniqueString(resourceGroup().id))]", - "metadata": { - "description": "Azure SQL Server Name (Logical Server)." - } - }, - "sql_admin_login": { - "type": "string", - "defaultValue": "adsgofastadmin", - "metadata": { - "description": "The administrator username of the SQL logical server" - } - }, - "sql_admin_password": { - "type": "secureString", - "metadata": { - "description": "The administrator password of the SQL logical server." 
- } - }, - "sample_db_name": { - "type": "string", - "defaultValue": "AdventureWorksLT" - }, - "ads_go_fast_db_name": { - "type": "string", - "defaultValue": "adsgofast" - }, - "staging_db_name": { - "type": "string", - "defaultValue": "staging" - }, - "vnet_name": { - "type": "string", - "defaultValue": "adsgofast-vnet", - "metadata": { - "description": "Name of Azure Bastion resource" - } - } - }, - "functions": [], - "variables": { - "data_subnet_name": "Data", - "sample_database_name_var": "[format('{0}/{1}', parameters('sql_server_name'), parameters('sample_db_name'))]", - "ads_go_fast_database_name_var": "[format('{0}/{1}', parameters('sql_server_name'), parameters('ads_go_fast_db_name'))]", - "staging_database_name_var": "[format('{0}/{1}', parameters('sql_server_name'), parameters('staging_db_name'))]", - "vnet_data_subnet_resource_id": "[resourceId('Microsoft.Network/virtualNetworks/subnets', parameters('vnet_name'), variables('data_subnet_name'))]" - }, - "resources": [ - { - "type": "Microsoft.Sql/servers", - "apiVersion": "2019-06-01-preview", - "name": "[parameters('sql_server_name')]", - "location": "[parameters('location')]", - "tags": { - "displayName": "[parameters('sql_server_name')]" - }, - "properties": { - "administratorLogin": "[parameters('sql_admin_login')]", - "administratorLoginPassword": "[parameters('sql_admin_password')]", - "version": "12.0", - "publicNetworkAccess": "Enabled" - } - }, - { - "type": "Microsoft.Sql/servers/virtualNetworkRules", - "apiVersion": "2015-05-01-preview", - "name": "[format('{0}/{1}', parameters('sql_server_name'), variables('data_subnet_name'))]", - "properties": { - "virtualNetworkSubnetId": "[variables('vnet_data_subnet_resource_id')]", - "ignoreMissingVnetServiceEndpoint": false - }, - "dependsOn": [ - "[resourceId('Microsoft.Sql/servers', parameters('sql_server_name'))]" - ] - }, - { - "type": "Microsoft.Sql/servers/databases", - "apiVersion": "2019-06-01-preview", - "name": 
"[variables('sample_database_name_var')]", - "location": "[parameters('location')]", - "tags": { - "displayName": "[variables('sample_database_name_var')]" - }, - "sku": { - "name": "Standard", - "tier": "Standard", - "capacity": 50 - }, - "properties": { - "collation": "SQL_Latin1_General_CP1_CI_AS", - "maxSizeBytes": 32212254720, - "catalogCollation": "SQL_Latin1_General_CP1_CI_AS", - "zoneRedundant": false, - "readScale": "Disabled", - "storageAccountType": "GRS", - "sampleName": "AdventureWorksLT" - }, - "dependsOn": [ - "[resourceId('Microsoft.Sql/servers', parameters('sql_server_name'))]" - ] - }, - { - "type": "Microsoft.Sql/servers/databases", - "apiVersion": "2019-06-01-preview", - "name": "[variables('ads_go_fast_database_name_var')]", - "location": "[parameters('location')]", - "tags": { - "displayName": "[variables('ads_go_fast_database_name_var')]" - }, - "sku": { - "name": "Standard", - "tier": "Standard", - "capacity": 50 - }, - "properties": { - "collation": "SQL_Latin1_General_CP1_CI_AS", - "maxSizeBytes": 32212254720, - "catalogCollation": "SQL_Latin1_General_CP1_CI_AS", - "zoneRedundant": false, - "readScale": "Disabled", - "storageAccountType": "GRS" - }, - "dependsOn": [ - "[resourceId('Microsoft.Sql/servers', parameters('sql_server_name'))]" - ] - }, - { - "type": "Microsoft.Sql/servers/databases", - "apiVersion": "2019-06-01-preview", - "name": "[variables('staging_database_name_var')]", - "location": "[parameters('location')]", - "tags": { - "displayName": "[variables('staging_database_name_var')]" - }, - "sku": { - "name": "Standard", - "tier": "Standard", - "capacity": 100 - }, - "properties": { - "collation": "SQL_Latin1_General_CP1_CI_AS", - "maxSizeBytes": 32212254720, - "catalogCollation": "SQL_Latin1_General_CP1_CI_AS", - "zoneRedundant": false, - "readScale": "Disabled", - "storageAccountType": "GRS" - }, - "dependsOn": [ - "[resourceId('Microsoft.Sql/servers', parameters('sql_server_name'))]" - ] - } - ] -} \ No newline at end of 
file diff --git a/solution/Deployment/arm/AzureSqlServer.bicep b/solution/Deployment/arm/AzureSqlServer.bicep deleted file mode 100644 index 0d745cbf..00000000 --- a/solution/Deployment/arm/AzureSqlServer.bicep +++ /dev/null @@ -1,120 +0,0 @@ -@description('Location for all resources.') -param location string = resourceGroup().location - -@description('Azure SQL Server Name (Logical Server).') -param sql_server_name string = 'adsgofast-srv-${uniqueString(resourceGroup().id)}' - -@description('The administrator username of the SQL logical server') -param sql_admin_login string = 'adsgofastadmin' - -@description('The administrator password of the SQL logical server.') -@secure() -param sql_admin_password string -param sample_db_name string = 'AdventureWorksLT' -param ads_go_fast_db_name string = 'adsgofast' -param staging_db_name string = 'staging' - -@description('Name of Azure Bastion resource') -param vnet_name string = 'adsgofast-vnet' - -var data_subnet_name = 'Data' -var sample_database_name_var = '${sql_server_name}/${sample_db_name}' -var ads_go_fast_database_name_var = '${sql_server_name}/${ads_go_fast_db_name}' -var staging_database_name_var = '${sql_server_name}/${staging_db_name}' -var vnet_data_subnet_resource_id = resourceId('Microsoft.Network/virtualNetworks/subnets', vnet_name, data_subnet_name) - -resource sql_server_name_resource 'Microsoft.Sql/servers@2019-06-01-preview' = { - name: sql_server_name - location: location - tags: { - displayName: sql_server_name - } - properties: { - administratorLogin: sql_admin_login - administratorLoginPassword: sql_admin_password - version: '12.0' - publicNetworkAccess: 'Enabled' - } -} - -resource sql_server_name_data_subnet_name 'Microsoft.Sql/servers/virtualNetworkRules@2015-05-01-preview' = { - parent: sql_server_name_resource - name: '${data_subnet_name}' - properties: { - virtualNetworkSubnetId: vnet_data_subnet_resource_id - ignoreMissingVnetServiceEndpoint: false - } -} - -resource sample_database_name 
'Microsoft.Sql/servers/databases@2019-06-01-preview' = { - name: sample_database_name_var - location: location - tags: { - displayName: sample_database_name_var - } - sku: { - name: 'Standard' - tier: 'Standard' - capacity: 50 - } - properties: { - collation: 'SQL_Latin1_General_CP1_CI_AS' - maxSizeBytes: 32212254720 - catalogCollation: 'SQL_Latin1_General_CP1_CI_AS' - zoneRedundant: false - readScale: 'Disabled' - storageAccountType: 'GRS' - sampleName: 'AdventureWorksLT' - } - dependsOn: [ - sql_server_name_resource - ] -} - -resource ads_go_fast_database_name 'Microsoft.Sql/servers/databases@2019-06-01-preview' = { - name: ads_go_fast_database_name_var - location: location - tags: { - displayName: ads_go_fast_database_name_var - } - sku: { - name: 'Standard' - tier: 'Standard' - capacity: 50 - } - properties: { - collation: 'SQL_Latin1_General_CP1_CI_AS' - maxSizeBytes: 32212254720 - catalogCollation: 'SQL_Latin1_General_CP1_CI_AS' - zoneRedundant: false - readScale: 'Disabled' - storageAccountType: 'GRS' - } - dependsOn: [ - sql_server_name_resource - ] -} - -resource staging_database_name 'Microsoft.Sql/servers/databases@2019-06-01-preview' = { - name: staging_database_name_var - location: location - tags: { - displayName: staging_database_name_var - } - sku: { - name: 'Standard' - tier: 'Standard' - capacity: 100 - } - properties: { - collation: 'SQL_Latin1_General_CP1_CI_AS' - maxSizeBytes: 32212254720 - catalogCollation: 'SQL_Latin1_General_CP1_CI_AS' - zoneRedundant: false - readScale: 'Disabled' - storageAccountType: 'GRS' - } - dependsOn: [ - sql_server_name_resource - ] -} diff --git a/solution/Deployment/arm/DataFactory.bicep b/solution/Deployment/arm/DataFactory.bicep deleted file mode 100644 index 966a275e..00000000 --- a/solution/Deployment/arm/DataFactory.bicep +++ /dev/null @@ -1,14 +0,0 @@ -@description('Location for all resources.') -param location string = resourceGroup().location - -@description('') -param adf_name string = 'test' - -resource 
adf_name_resource 'Microsoft.DataFactory/factories@2018-06-01' = { - name: adf_name - location: location - identity: { - type: 'SystemAssigned' - } - properties: {} -} \ No newline at end of file diff --git a/solution/Deployment/arm/DataFactory.json b/solution/Deployment/arm/DataFactory.json deleted file mode 100644 index 13a39567..00000000 --- a/solution/Deployment/arm/DataFactory.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "[resourceGroup().location]", - "metadata": { - "description": "Location for all resources." - } - }, - "adf-name": { - "type": "string", - "defaultValue": "test", - "metadata": { - "description": "" - } - } - }, - "resources": [{ - "apiVersion": "2018-06-01", - "name": "[parameters('adf-name')]", - "location": "[parameters('location')]", - "type": "Microsoft.DataFactory/factories", - "identity": { - "type": "SystemAssigned" - }, - "properties": {} - }], - "outputs":{} -} \ No newline at end of file diff --git a/solution/Deployment/arm/FunctionApp.bicep b/solution/Deployment/arm/FunctionApp.bicep deleted file mode 100644 index 20aefce7..00000000 --- a/solution/Deployment/arm/FunctionApp.bicep +++ /dev/null @@ -1,48 +0,0 @@ -@description('Location for all resources.') -param location string = resourceGroup().location - -@description('The name of you Web Site.') -param azure_function_site_name string = 'FuncApp-${uniqueString(resourceGroup().id)}' - -@description('The name of Azure Application Insights.') -param app_insights_name string = 'appinsights-adsgofast' - -@description('The name of storage account used for logging') -param storage_log_account_name string = 'adsgofastlog' - -@description('A key to the storage account') -param storage_log_account_key string = '' - -@description('') -param appservice_name string = '' - -resource 
azure_function_site_name_resource 'Microsoft.Web/sites@2020-06-01' = { - name: azure_function_site_name - kind: 'functionapp' - location: location - properties: { - name: azure_function_site_name - siteConfig: { - appSettings: [ - { - name: 'FUNCTIONS_WORKER_RUNTIME' - value: 'dotnet' - } - { - name: 'FUNCTIONS_EXTENSION_VERSION' - value: '~3' - } - { - name: 'AzureWebJobsStorage' - value: 'DefaultEndpointsProtocol=https;AccountName=${storage_log_account_name};AccountKey=${storage_log_account_key}' - } - { - name: 'APPINSIGHTS_INSTRUMENTATIONKEY' - value: reference(resourceId('microsoft.insights/components/', app_insights_name), '2015-05-01').InstrumentationKey - } - ] - } - serverFarmId: resourceId('Microsoft.Web/serverfarms', appservice_name) - clientAffinityEnabled: false - } -} \ No newline at end of file diff --git a/solution/Deployment/arm/FunctionApp.json b/solution/Deployment/arm/FunctionApp.json deleted file mode 100644 index b07ecc0a..00000000 --- a/solution/Deployment/arm/FunctionApp.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "[resourceGroup().location]", - "metadata": { - "description": "Location for all resources." - } - }, - "azure-function-site-name": { - "type": "string", - "defaultValue": "[concat('FuncApp-', uniqueString(resourceGroup().id))]", - "metadata": { - "description": "The name of you Web Site." - } - }, - "app-insights-name": { - "type": "string", - "defaultValue": "appinsights-adsgofast", - "metadata": { - "description": "The name of Azure Application Insights." 
- } - }, - "storage-log-account-name": { - "type": "string", - "defaultValue": "adsgofastlog", - "metadata": { - "description": "The name of storage account used for logging" - } - }, - "storage-log-account-key": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "A key to the storage account" - } - }, - "appservice-name": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "" - } - } - }, - "resources": [ - { - "type": "Microsoft.Web/sites", - "apiVersion": "2020-06-01", - "name": "[parameters('azure-function-site-name')]", - "kind": "functionapp", - "location": "[parameters('location')]", - "properties": { - "name": "[parameters('azure-function-site-name')]", - "siteConfig": { - "appSettings": [ - { - "name": "FUNCTIONS_WORKER_RUNTIME", - "value": "dotnet" - }, - { - "name": "FUNCTIONS_EXTENSION_VERSION", - "value": "~3" - }, - { - "name": "AzureWebJobsStorage", - "value": "[concat('DefaultEndpointsProtocol=https;AccountName=', parameters('storage-log-account-name'), ';AccountKey=', parameters('storage-log-account-key'))]" - }, - { - "name": "APPINSIGHTS_INSTRUMENTATIONKEY", - "value": "[reference(resourceId('microsoft.insights/components/', parameters('app-insights-name')), '2015-05-01').InstrumentationKey]" - } - ] - }, - "serverFarmId": "[resourceId('Microsoft.Web/serverfarms', parameters('appservice-name'))]", - "clientAffinityEnabled": false - } - } - ], - "outputs": { - } -} diff --git a/solution/Deployment/arm/KeyVault.bicep b/solution/Deployment/arm/KeyVault.bicep deleted file mode 100644 index a1fe1975..00000000 --- a/solution/Deployment/arm/KeyVault.bicep +++ /dev/null @@ -1,28 +0,0 @@ -@description('Location for all resources.') -param location string = '' - -@description('kv') -param keyvault_name string = 'kv' - -@description('kv') -param tenant_id string = 'kv' - -resource keyvault_name_resource 'Microsoft.KeyVault/vaults@2018-02-14' = { - name: keyvault_name - location: location - properties: { - 
enabledForDeployment: true - enabledForDiskEncryption: true - enabledForTemplateDeployment: true - tenantId: tenant_id - accessPolicies: [] - sku: { - name: 'standard' - family: 'A' - } - networkAcls: { - defaultAction: 'Allow' - bypass: 'AzureServices' - } - } -} \ No newline at end of file diff --git a/solution/Deployment/arm/KeyVault.json b/solution/Deployment/arm/KeyVault.json deleted file mode 100644 index 5d9852cd..00000000 --- a/solution/Deployment/arm/KeyVault.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "Location for all resources." - } - }, - "keyvault-name": { - "type": "string", - "defaultValue": "kv", - "metadata": { - "description": "kv" - } - }, - "tenant-id": { - "type": "string", - "defaultValue": "kv", - "metadata": { - "description": "kv" - } - } - }, - "resources": [{ - "type": "Microsoft.KeyVault/vaults", - "name": "[parameters('keyvault-name')]", - "apiVersion": "2018-02-14", - "location": "[parameters('location')]", - "properties": { - "enabledForDeployment": true, - "enabledForDiskEncryption":true, - "enabledForTemplateDeployment": true, - "tenantId": "[parameters('tenant-id')]", - "accessPolicies": [ - ], - "sku": { - "name": "standard", - "family": "A" - }, - "networkAcls": { - "defaultAction": "Allow", - "bypass": "AzureServices" - } - } - }], - "outputs":{} -} \ No newline at end of file diff --git a/solution/Deployment/arm/LogAnalytics.bicep b/solution/Deployment/arm/LogAnalytics.bicep deleted file mode 100644 index 242057cc..00000000 --- a/solution/Deployment/arm/LogAnalytics.bicep +++ /dev/null @@ -1,36 +0,0 @@ -@description('Location for all resources.') -param location string = resourceGroup().location -param workspaces_adsgofastloganalytics_name string = 'adsloganalytics' - -@description('Pricing tier: 
PerGB2018 or legacy tiers (Free, Standalone, PerNode, Standard or Premium) which are not available to all customers.') -@allowed([ - 'pergb2018' - 'Free' - 'Standalone' - 'PerNode' - 'Standard' - 'Premium' -]) -param log_analytics_sku string = 'pergb2018' - -@description('Number of days to retain data.') -param log_analytics_retentionInDays int = 30 - -@description('true to use resource or workspace permissions. false to require workspace permissions.') -param log_analytics_resourcePermissions bool = false - -resource workspaces_adsgofastloganalytics_name_resource 'microsoft.operationalinsights/workspaces@2020-08-01' = { - name: workspaces_adsgofastloganalytics_name - location: location - properties: { - sku: { - name: log_analytics_sku - } - retentionInDays: log_analytics_retentionInDays - features: { - searchVersion: 1 - legacy: 0 - enableLogAccessUsingOnlyResourcePermissions: log_analytics_resourcePermissions - } - } -} \ No newline at end of file diff --git a/solution/Deployment/arm/LogAnalytics.json b/solution/Deployment/arm/LogAnalytics.json deleted file mode 100644 index 096d1a83..00000000 --- a/solution/Deployment/arm/LogAnalytics.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "[resourceGroup().location]", - "metadata": { - "description": "Location for all resources." - } - }, - "workspaces_adsgofastloganalytics_name": { - "defaultValue": "adsloganalytics", - "type": "String" - }, - "log-analytics-sku": { - "type": "string", - "allowedValues": [ - "pergb2018", - "Free", - "Standalone", - "PerNode", - "Standard", - "Premium" - ], - "defaultValue": "pergb2018", - "metadata": { - "description": "Pricing tier: PerGB2018 or legacy tiers (Free, Standalone, PerNode, Standard or Premium) which are not available to all customers." 
- } - }, - "log-analytics-retentionInDays": { - "type": "int", - "defaultValue": 30, - "metadata": { - "description": "Number of days to retain data." - } - }, - "log-analytics-resourcePermissions": { - "type": "bool", - "defaultValue": false, - "metadata": { - "description": "true to use resource or workspace permissions. false to require workspace permissions." - } - } - - }, - "resources": [ - { - "type": "microsoft.operationalinsights/workspaces", - "apiVersion": "2020-08-01", - "name": "[parameters('workspaces_adsgofastloganalytics_name')]", - "location": "[parameters('location')]", - "properties": { - "sku": { - "name": "[parameters('log-analytics-sku')]" - }, - "retentionInDays": "[parameters('log-analytics-retentionInDays')]", - "features": { - "searchVersion": 1, - "legacy": 0, - "enableLogAccessUsingOnlyResourcePermissions": "[parameters('log-analytics-resourcePermissions')]" - } - } - } - - ], - "outputs": { - } -} diff --git a/solution/Deployment/arm/Networking.bicep b/solution/Deployment/arm/Networking.bicep deleted file mode 100644 index ad82604a..00000000 --- a/solution/Deployment/arm/Networking.bicep +++ /dev/null @@ -1,107 +0,0 @@ -@description('Location for all resources.') -param location string = resourceGroup().location - -@description('Name of Azure Bastion resource') -param vnet_name string = 'adsgofast-vnet' - -@description('Bastion subnet IP prefix MUST be within vnet IP prefix address space') -param vnet_address_prefix string = '10.1.0.0/16' - -@description('Bastion subnet IP prefix MUST be within vnet IP prefix address space') -param bastion_subnet_ip_prefix string = '10.1.1.0/27' - -@description('Bastion subnet IP prefix MUST be within vnet IP prefix address space') -param data_subnet_ip_prefix string = '10.1.2.0/27' - -@description('Name of Azure Bastion resource') -param bastion_host_name string = 'azure-bastion-ads-go-fast' - -var bastion_subnet_name = 'AzureBastionSubnet' -var data_subnet_name = 'Data' -var 
public_ip_address_name_bastion_var = '${bastion_host_name}-pip' - -resource public_ip_address_name_bastion 'Microsoft.Network/publicIpAddresses@2019-02-01' = { - name: public_ip_address_name_bastion_var - location: location - sku: { - name: 'Standard' - } - properties: { - publicIPAllocationMethod: 'Static' - } -} - -resource vnet_name_resource 'Microsoft.Network/virtualNetworks@2019-02-01' = { - name: vnet_name - location: location - properties: { - addressSpace: { - addressPrefixes: [ - vnet_address_prefix - ] - } - subnets: [ - { - name: bastion_subnet_name - properties: { - addressPrefix: bastion_subnet_ip_prefix - } - } - { - name: data_subnet_name - properties: { - addressPrefix: data_subnet_ip_prefix - } - } - ] - } -} - -resource vnet_name_bastion_subnet_name 'Microsoft.Network/virtualNetworks/subnets@2019-02-01' = { - parent: vnet_name_resource - name: '${bastion_subnet_name}' - location: location - properties: { - addressPrefix: bastion_subnet_ip_prefix - } - dependsOn: [ - vnet_name_data_subnet_name - ] -} - -resource vnet_name_data_subnet_name 'Microsoft.Network/virtualNetworks/subnets@2019-02-01' = { - parent: vnet_name_resource - name: '${data_subnet_name}' - location: location - properties: { - addressPrefix: data_subnet_ip_prefix - serviceEndpoints: [ - { - service: 'Microsoft.Sql' - } - ] - } -} - -resource bastion_host_name_resource 'Microsoft.Network/bastionHosts@2019-04-01' = { - name: bastion_host_name - location: location - properties: { - ipConfigurations: [ - { - name: 'IpConf' - properties: { - subnet: { - id: vnet_name_bastion_subnet_name.id - } - publicIPAddress: { - id: public_ip_address_name_bastion.id - } - } - } - ] - } - dependsOn: [ - vnet_name_resource - ] -} \ No newline at end of file diff --git a/solution/Deployment/arm/Networking.json b/solution/Deployment/arm/Networking.json deleted file mode 100644 index a079f787..00000000 --- a/solution/Deployment/arm/Networking.json +++ /dev/null @@ -1,210 +0,0 @@ -{ - "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "[resourceGroup().location]", - "metadata": { - "description": "Location for all resources." - } - }, - "vnet-name": { - "type": "string", - "defaultValue": "adsgofast-vnet", - "metadata": { - "description": "Name of Azure Bastion resource" - } - }, - "vnet-address-prefix": { - "type": "string", - "defaultValue": "10.1.0.0/16", - "metadata": { - "description": "Bastion subnet IP prefix MUST be within vnet IP prefix address space" - } - }, - "bastion-subnet-ip-prefix": { - "type": "string", - "defaultValue": "10.1.1.0/27", - "metadata": { - "description": "Bastion subnet IP prefix MUST be within vnet IP prefix address space" - } - }, - "data-subnet-ip-prefix": { - "type": "string", - "defaultValue": "10.1.2.0/27", - "metadata": { - "description": "Data subnet IP prefix MUST be within vnet IP prefix address space" - } - }, - "webapp-subnet-ip-prefix": { - "type": "string", - "defaultValue": "10.1.3.0/27", - "metadata": { - "description": "Web App subnet IP prefix MUST be within vnet IP prefix address space" - } - }, - "funcapp-subnet-ip-prefix": { - "type": "string", - "defaultValue": "10.1.4.0/27", - "metadata": { - "description": "Function App subnet IP prefix MUST be within vnet IP prefix address space" - } - }, - - "bastion-host-name": { - "type": "string", - "defaultValue": "azure-bastion-ads-go-fast", - "metadata": { - "description": "Name of Azure Bastion resource" - } - }, - "bastion-subnet-name": { - "type": "string", - "defaultValue": "AzureBastionSubnet", - "metadata": { - "description": "Name of Azure Bastion Subnet" - } - }, - "data-subnet-name": { - "type": "string", - "defaultValue": "Data", - "metadata": { - "description": "Name of Data Subnet" - } - }, - "webapp-subnet-name": { - "type": "string", - "defaultValue": "WebApp", - "metadata": { - "description": "Name of 
Web App Subnet. vNet integration in WebApp requires an empty Subnet." - } - }, - "funcapp-subnet-name": { - "type": "string", - "defaultValue": "FuncApp", - "metadata": { - "description": "Name of Function App Subnet." - } - } - }, - "variables": { - "bastion-subnet-name": "[parameters('bastion-subnet-name')]", - "data-subnet-name": "[parameters('data-subnet-name')]", - "webapp-subnet-name": "[parameters('webapp-subnet-name')]", - "funcapp-subnet-name": "[parameters('funcapp-subnet-name')]", - "public-ip-address-name-bastion": "[concat(parameters('bastion-host-name'),'-pip')]" - }, - "resources": [ - { - "apiVersion": "2019-02-01", - "type": "Microsoft.Network/publicIpAddresses", - "name": "[variables('public-ip-address-name-bastion')]", - "location": "[parameters('location')]", - "sku": { - "name": "Standard" - }, - "properties": { - "publicIPAllocationMethod": "Static" - } - }, - { - "apiVersion": "2019-02-01", - "name": "[parameters('vnet-name')]", - "type": "Microsoft.Network/virtualNetworks", - "location": "[parameters('location')]", - "properties": { - "addressSpace": { - "addressPrefixes": [ - "[parameters('vnet-address-prefix')]" - ] - }, - "subnets": [ - { - "name": "[variables('bastion-subnet-name')]", - "properties": { - "addressPrefix": "[parameters('bastion-subnet-ip-prefix')]" - } - }, - { - "name": "[variables('data-subnet-name')]", - "properties": { - "addressPrefix": "[parameters('data-subnet-ip-prefix')]" - } - }, - { - "name": "[variables('webapp-subnet-name')]", - "properties": { - "addressPrefix": "[parameters('webapp-subnet-ip-prefix')]" - } - }, - { - "name": "[variables('funcapp-subnet-name')]", - "properties": { - "addressPrefix": "[parameters('funcapp-subnet-ip-prefix')]" - } - } - ] - } - }, - { - "apiVersion": "2019-02-01", - "type": "Microsoft.Network/virtualNetworks/subnets", - "name": "[concat(parameters('vnet-name'), '/', variables('bastion-subnet-name'))]", - "dependsOn": [ - "[resourceId('Microsoft.Network/virtualNetworks', 
parameters('vnet-name'))]", - "[resourceId('Microsoft.Network/virtualNetworks/subnets', parameters('vnet-name'), variables('data-subnet-name'))]" - ], - "location": "[parameters('location')]", - "properties": { - "addressPrefix": "[parameters('bastion-subnet-ip-prefix')]" - } - }, - { - "apiVersion": "2019-02-01", - "type": "Microsoft.Network/virtualNetworks/subnets", - "name": "[concat(parameters('vnet-name'), '/', variables('data-subnet-name'))]", - "dependsOn": [ - "[resourceId('Microsoft.Network/virtualNetworks', parameters('vnet-name'))]" - ], - "location": "[parameters('location')]", - "properties": { - "addressPrefix": "[parameters('data-subnet-ip-prefix')]", - "serviceEndpoints": [ - { - "service": "Microsoft.Sql" - } - ] - } - }, - { - "apiVersion": "2019-04-01", - "type": "Microsoft.Network/bastionHosts", - "name": "[parameters('bastion-host-name')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('Microsoft.Network/publicIpAddresses', variables('public-ip-address-name-bastion'))]", - "[resourceId('Microsoft.Network/virtualNetworks', parameters('vnet-name'))]", - "[resourceId('Microsoft.Network/virtualNetworks/subnets', parameters('vnet-name'), variables('bastion-subnet-name'))]" - ], - "properties": { - "ipConfigurations": [ - { - "name": "IpConf", - "properties": { - "subnet": { - "id": "[resourceId('Microsoft.Network/virtualNetworks/subnets', parameters('vnet-name'), variables('bastion-subnet-name'))]" - }, - "publicIPAddress": { - "id": "[resourceId('Microsoft.Network/publicIpAddresses', variables('public-ip-address-name-bastion'))]" - } - } - } - ] - } - } - - ], - "outputs": { - } -} diff --git a/solution/Deployment/arm/Storage_ADLS.bicep b/solution/Deployment/arm/Storage_ADLS.bicep deleted file mode 100644 index f4819ba8..00000000 --- a/solution/Deployment/arm/Storage_ADLS.bicep +++ /dev/null @@ -1,53 +0,0 @@ -@description('Location for all resources.') -param location string = '' - -@description('') -param 
storage_account_name string = 'adsgfadls' - -@description('datalakelanding') -param storage_landing_container_name string = 'datalakelanding' - -@description('datalakeraw') -param storage_raw_container_name string = 'datalakeraw' - -@description('') -param storage_account_sku string = 'Standard_GRS' - -resource storage_account_name_resource 'Microsoft.Storage/storageAccounts@2019-04-01' = { - location: location - name: storage_account_name - kind: 'StorageV2' - sku: { - name: storage_account_sku - } - properties: { - encryption: { - keySource: 'Microsoft.Storage' - services: { - blob: { - enabled: true - } - file: { - enabled: true - } - } - } - isHnsEnabled: true - supportsHttpsTrafficOnly: true - accessTier: 'Hot' - } -} - -resource storage_account_name_default_storage_landing_container_name 'Microsoft.Storage/storageAccounts/blobServices/containers@2019-06-01' = { - name: '${storage_account_name}/default/${storage_landing_container_name}' - dependsOn: [ - storage_account_name_resource - ] -} - -resource storage_account_name_default_storage_raw_container_name 'Microsoft.Storage/storageAccounts/blobServices/containers@2019-06-01' = { - name: '${storage_account_name}/default/${storage_raw_container_name}' - dependsOn: [ - storage_account_name_resource - ] -} \ No newline at end of file diff --git a/solution/Deployment/arm/Storage_ADLS.json b/solution/Deployment/arm/Storage_ADLS.json deleted file mode 100644 index 8559cebd..00000000 --- a/solution/Deployment/arm/Storage_ADLS.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "Location for all resources." 
- } - }, - "storage-account-name": { - "type": "String", - "defaultValue": "adsgfadls", - "metadata": { - "description": "" - } - }, - "storage-landing-container-name": { - "type": "String", - "defaultValue": "datalakelanding", - "metadata": { - "description": "datalakelanding" - } - }, - "storage-raw-container-name": { - "type": "String", - "defaultValue": "datalakeraw", - "metadata": { - "description": "datalakeraw" - } - }, - "storage-account-sku": { - "type": "String", - "defaultValue": "Standard_GRS", - "metadata": { - "description": "" - } - } - }, - "resources": [ - { - "type": "Microsoft.Storage/storageAccounts", - "apiVersion": "2019-04-01", - "location": "[parameters('location')]", - "name": "[parameters('storage-account-name')]", - "kind": "StorageV2", - "sku": { - "name": "[parameters('storage-account-sku')]" - }, - "properties": { - "encryption": { - "keySource": "Microsoft.Storage", - "services": { - "blob": { - "enabled": true - }, - "file": { - "enabled": true - } - } - }, - "isHnsEnabled": true, - "supportsHttpsTrafficOnly": true, - "accessTier": "Hot" - }, - "resources": [ - { - "type": "blobServices/containers", - "apiVersion": "2019-06-01", - "name": "[concat('default/', parameters('storage-landing-container-name'))]", - "dependsOn": [ - "[parameters('storage-account-name')]" - ] - }, - { - "type": "blobServices/containers", - "apiVersion": "2019-06-01", - "name": "[concat('default/', parameters('storage-raw-container-name'))]", - "dependsOn": [ - "[parameters('storage-account-name')]" - ] - } - ] - } - ], - "outputs": { - } -} diff --git a/solution/Deployment/arm/Storage_Blob.bicep b/solution/Deployment/arm/Storage_Blob.bicep deleted file mode 100644 index 78c3f191..00000000 --- a/solution/Deployment/arm/Storage_Blob.bicep +++ /dev/null @@ -1,53 +0,0 @@ -@description('Location for all resources.') -param location string = '' - -@description('') -param storage_account_name string = 'adsgfadls' - -@description('') -param 
storage_landing_container_name string = 'datalakelanding' - -@description('') -param storage_raw_container_name string = 'datalakeraw' - -@description('') -param storage_account_sku string = 'Standard_GRS' - -resource storage_account_name_resource 'Microsoft.Storage/storageAccounts@2019-04-01' = { - location: location - name: storage_account_name - kind: 'StorageV2' - sku: { - name: storage_account_sku - } - properties: { - encryption: { - keySource: 'Microsoft.Storage' - services: { - blob: { - enabled: true - } - file: { - enabled: true - } - } - } - isHnsEnabled: false - supportsHttpsTrafficOnly: true - accessTier: 'Hot' - } -} - -resource storage_account_name_default_storage_landing_container_name 'Microsoft.Storage/storageAccounts/blobServices/containers@2019-06-01' = { - name: '${storage_account_name}/default/${storage_landing_container_name}' - dependsOn: [ - storage_account_name_resource - ] -} - -resource storage_account_name_default_storage_raw_container_name 'Microsoft.Storage/storageAccounts/blobServices/containers@2019-06-01' = { - name: '${storage_account_name}/default/${storage_raw_container_name}' - dependsOn: [ - storage_account_name_resource - ] -} \ No newline at end of file diff --git a/solution/Deployment/arm/Storage_Blob.json b/solution/Deployment/arm/Storage_Blob.json deleted file mode 100644 index 541b449f..00000000 --- a/solution/Deployment/arm/Storage_Blob.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "Location for all resources." 
- } - }, - "storage-account-name": { - "type": "String", - "defaultValue": "adsgfadls", - "metadata": { - "description": "" - } - }, - "storage-landing-container-name": { - "type": "String", - "defaultValue": "datalakelanding", - "metadata": { - "description": "" - } - }, - "storage-raw-container-name": { - "type": "String", - "defaultValue": "datalakeraw", - "metadata": { - "description": "" - } - }, - "storage-account-sku": { - "type": "String", - "defaultValue": "Standard_GRS", - "metadata": { - "description": "" - } - } - }, - "resources": [ - { - "type": "Microsoft.Storage/storageAccounts", - "apiVersion": "2019-04-01", - "location": "[parameters('location')]", - "name": "[parameters('storage-account-name')]", - "kind": "StorageV2", - "sku": { - "name": "[parameters('storage-account-sku')]" - }, - "properties": { - "encryption": { - "keySource": "Microsoft.Storage", - "services": { - "blob": { - "enabled": true - }, - "file": { - "enabled": true - } - } - }, - "isHnsEnabled": false, - "supportsHttpsTrafficOnly": true, - "accessTier": "Hot" - }, - "resources": [ - { - "type": "blobServices/containers", - "apiVersion": "2019-06-01", - "name": "[concat('default/', parameters('storage-landing-container-name'))]", - "dependsOn": [ - "[parameters('storage-account-name')]" - ] - }, - { - "type": "blobServices/containers", - "apiVersion": "2019-06-01", - "name": "[concat('default/', parameters('storage-raw-container-name'))]", - "dependsOn": [ - "[parameters('storage-account-name')]" - ] - } - ] - } - ], - "outputs": { - } -} diff --git a/solution/Deployment/arm/Storage_Logging.bicep b/solution/Deployment/arm/Storage_Logging.bicep deleted file mode 100644 index e00121cb..00000000 --- a/solution/Deployment/arm/Storage_Logging.bicep +++ /dev/null @@ -1,21 +0,0 @@ -@description('Location for all resources.') -param location string = '' - -@description('The name of the Log Store account to create.') -param storage_log_account_name string = 'logstg' - -resource 
storage_log_account_name_resource 'Microsoft.Storage/storageAccounts@2019-06-01' = { - name: storage_log_account_name - location: location - kind: 'StorageV2' - sku: { - name: 'Standard_LRS' - tier: 'Standard' - } - properties: { - accessTier: 'Hot' - } -} - -output stringSubcriptionId string = subscription().id -output stringLogStorageAccount string = storage_log_account_name \ No newline at end of file diff --git a/solution/Deployment/arm/Storage_Logging.json b/solution/Deployment/arm/Storage_Logging.json deleted file mode 100644 index bc47038f..00000000 --- a/solution/Deployment/arm/Storage_Logging.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "Location for all resources." - } - }, - "storage-log-account-name": { - "type": "String", - "defaultValue": "logstg", - "metadata": { - "description": "The name of the Log Store account to create." 
- } - } - }, - "resources": [ - { - "type": "Microsoft.Storage/storageAccounts", - "name": "[parameters('storage-log-account-name')]", - "apiVersion": "2019-06-01", - "location": "[parameters('location')]", - "kind": "StorageV2", - "sku": { - "name": "Standard_LRS", - "tier": "Standard" - }, - "properties": { - "accessTier": "Hot" - } - } - ], - "outputs": { - "stringSubcriptionId": { - "type": "string", - "value": "[subscription().id]" - }, - "stringLogStorageAccount": { - "type": "string", - "value": "[parameters('storage-log-account-name')]" - } - } -} diff --git a/solution/Deployment/arm/VirtualMachine.bicep b/solution/Deployment/arm/VirtualMachine.bicep deleted file mode 100644 index 89eff9ec..00000000 --- a/solution/Deployment/arm/VirtualMachine.bicep +++ /dev/null @@ -1,254 +0,0 @@ -@description('Location for all resources.') -param location string = resourceGroup().location - -@description('The size of the VM') -param adf_ir_vm_size string = 'Standard_D4s_v3' - -@description('Username for the Virtual Machine.') -param adf_ir_vm_admin_username string = 'adsgofastadmin' - -@description('Password for the Virtual Machine. 
The password must be at least 12 characters long and have lower case, upper characters, digit and a special character (Regex match)') -@secure() -param adf_ir_vm_admin_password string - -@description('Defines the type of storage account to use for the data lake store') -@allowed([ - 'Standard_LRS' - 'Standard_ZRS' - 'Standard_GRS' - 'Standard_RAGRS' -]) -param os_disk_type_adfir_vm string = 'Standard_LRS' -param adf_ir_onp_vm_name string - -@description('Windows Server and SQL Offer') -@allowed([ - 'sql2019-ws2019' - 'sql2017-ws2019' - 'SQL2017-WS2016' - 'SQL2016SP1-WS2016' - 'SQL2016SP2-WS2016' - 'SQL2014SP3-WS2012R2' - 'SQL2014SP2-WS2012R2' -]) -param imageOffer string = 'sql2019-ws2019' - -@description('SQL Server Sku') -@allowed([ - 'Standard' - 'Enterprise' - 'SQLDEV' - 'Web' - 'Express' -]) -param sqlSku string = 'Standard' - -@description('Amount of data disks (1TB each) for SQL Data files') -@minValue(1) -@maxValue(8) -param sqlDataDisksCount int = 1 - -@description('Amount of data disks (1TB each) for SQL Log files') -@minValue(1) -@maxValue(8) -param sqlLogDisksCount int = 1 - -@description('SQL Server Workload Type') -@allowed([ - 'General' - 'OLTP' - 'DW' -]) -param storageWorkloadType string = 'General' - -@description('Path for SQL Data files. Please choose drive letter from F to Z, and other drives from A to E are reserved for system') -param dataPath string = 'F:\\SQLData' - -@description('Path for SQL Log files. Please choose drive letter from F to Z and different than the one used for SQL data. 
Drive letter from A to E are reserved for system') -param logPath string = 'G:\\SQLLog' - -@description('Name of Azure Bastion resource') -param vnet_name string = 'adsgofast-vnet' - -var adf_ir_vm_name_var = take('IR-Az-${uniqueString(resourceGroup().id)}-VM', 15) -var adf_ir_az_network_interface_name_var = '${adf_ir_vm_name_var}NetInt' -var adf_ir_onp_network_interface_name_var = '${adf_ir_onp_vm_name}NetInt' -var dataDisks = { - createOption: 'empty' - caching: 'ReadOnly' - writeAcceleratorEnabled: false - storageAccountType: 'Premium_LRS' - diskSizeGB: 1023 -} -var diskConfigurationType = 'NEW' -var dataDisksLuns = array(range(0, sqlDataDisksCount)) -var logDisksLuns = array(range(sqlDataDisksCount, sqlLogDisksCount)) -var tempDbPath = 'D:\\SQLTemp' -var data_subnet_name = 'Data' - -resource adf_ir_az_network_interface_name 'Microsoft.Network/networkInterfaces@2019-09-01' = { - name: adf_ir_az_network_interface_name_var - location: location - tags: { - displayName: adf_ir_az_network_interface_name_var - } - properties: { - ipConfigurations: [ - { - name: 'ipConfig1' - properties: { - privateIPAllocationMethod: 'Dynamic' - subnet: { - id: resourceId('Microsoft.Network/virtualNetworks/subnets', vnet_name, data_subnet_name) - } - } - } - ] - } - dependsOn: [] -} - -resource adf_ir_onp_network_interface_name 'Microsoft.Network/networkInterfaces@2019-09-01' = { - name: adf_ir_onp_network_interface_name_var - location: location - tags: { - displayName: adf_ir_onp_network_interface_name_var - } - properties: { - ipConfigurations: [ - { - name: 'ipConfig1' - properties: { - privateIPAllocationMethod: 'Dynamic' - subnet: { - id: resourceId('Microsoft.Network/virtualNetworks/subnets', vnet_name, data_subnet_name) - } - } - } - ] - } - dependsOn: [] -} - -resource adf_ir_vm_name 'Microsoft.Compute/virtualMachines@2019-07-01' = { - name: adf_ir_vm_name_var - location: location - tags: { - displayName: adf_ir_vm_name_var - } - properties: { - hardwareProfile: { - vmSize: 
adf_ir_vm_size - } - osProfile: { - computerName: adf_ir_vm_name_var - adminUsername: adf_ir_vm_admin_username - adminPassword: adf_ir_vm_admin_password - } - storageProfile: { - imageReference: { - publisher: 'MicrosoftWindowsServer' - offer: 'WindowsServer' - sku: '2019-Datacenter' - version: 'latest' - } - osDisk: { - name: '${adf_ir_vm_name_var}OsDisk' - caching: 'ReadWrite' - createOption: 'FromImage' - managedDisk: { - storageAccountType: os_disk_type_adfir_vm - } - diskSizeGB: 128 - } - } - networkProfile: { - networkInterfaces: [ - { - id: adf_ir_az_network_interface_name.id - } - ] - } - } -} - -resource adf_ir_onp_vm_name_resource 'Microsoft.Compute/virtualMachines@2019-07-01' = { - name: adf_ir_onp_vm_name - location: location - tags: { - displayName: adf_ir_onp_vm_name - } - properties: { - hardwareProfile: { - vmSize: adf_ir_vm_size - } - osProfile: { - computerName: adf_ir_onp_vm_name - adminUsername: adf_ir_vm_admin_username - adminPassword: adf_ir_vm_admin_password - windowsConfiguration: { - enableAutomaticUpdates: true - provisionVMAgent: true - } - } - storageProfile: { - imageReference: { - publisher: 'MicrosoftSQLServer' - offer: imageOffer - sku: sqlSku - version: 'latest' - } - osDisk: { - name: '${adf_ir_onp_vm_name}OsDisk' - caching: 'ReadWrite' - createOption: 'FromImage' - managedDisk: { - storageAccountType: os_disk_type_adfir_vm - } - diskSizeGB: 128 - } - dataDisks: [for j in range(0, (sqlDataDisksCount + sqlLogDisksCount)): { - lun: j - createOption: dataDisks.createOption - caching: ((j >= sqlDataDisksCount) ? 
'None' : dataDisks.caching) - writeAcceleratorEnabled: dataDisks.writeAcceleratorEnabled - diskSizeGB: dataDisks.diskSizeGB - managedDisk: { - storageAccountType: dataDisks.storageAccountType - } - }] - } - networkProfile: { - networkInterfaces: [ - { - id: adf_ir_onp_network_interface_name.id - } - ] - } - } -} - -resource Microsoft_SqlVirtualMachine_SqlVirtualMachines_adf_ir_onp_vm_name 'Microsoft.SqlVirtualMachine/SqlVirtualMachines@2017-03-01-preview' = { - name: adf_ir_onp_vm_name - location: location - properties: { - virtualMachineResourceId: adf_ir_onp_vm_name_resource.id - sqlManagement: 'Full' - sqlServerLicenseType: 'PAYG' - storageConfigurationSettings: { - diskConfigurationType: diskConfigurationType - storageWorkloadType: storageWorkloadType - sqlDataSettings: { - luns: dataDisksLuns - defaultFilePath: dataPath - } - sqlLogSettings: { - luns: logDisksLuns - defaultFilePath: logPath - } - sqlTempDbSettings: { - defaultFilePath: tempDbPath - } - } - } -} diff --git a/solution/Deployment/arm/VirtualMachine.json b/solution/Deployment/arm/VirtualMachine.json deleted file mode 100644 index 6649d63d..00000000 --- a/solution/Deployment/arm/VirtualMachine.json +++ /dev/null @@ -1,342 +0,0 @@ -{ - "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "parameters": { - "location": { - "type": "string", - "defaultValue": "[resourceGroup().location]", - "metadata": { - "description": "Location for all resources." - } - }, - "adf-ir-vm-size": { - "type": "string", - "defaultValue": "Standard_D4s_v3", - "metadata": { - "description": "The size of the VM" - } - }, - "adf-ir-vm-admin-username": { - "type": "string", - "defaultValue": "adsgofastadmin", - "metadata": { - "description": "Username for the Virtual Machine." - } - }, - "adf-ir-vm-admin-password": { - "type": "secureString", - "metadata": { - "description": "Password for the Virtual Machine. 
The password must be at least 12 characters long and have lower case, upper characters, digit and a special character (Regex match)" - } - }, - "os-disk-type-adfir-vm": { - "type": "string", - "defaultValue": "Standard_LRS", - "allowedValues": [ - "Standard_LRS", - "Standard_ZRS", - "Standard_GRS", - "Standard_RAGRS" - ], - "metadata": { - "description": "Defines the type of storage account to use for the data lake store" - } - }, - "adf-ir-onp-vm-name": { - "type": "String" - }, - "imageOffer": { - "type": "String", - "defaultValue": "sql2019-ws2019", - "allowedValues": [ - "sql2019-ws2019", - "sql2017-ws2019", - "SQL2017-WS2016", - "SQL2016SP1-WS2016", - "SQL2016SP2-WS2016", - "SQL2014SP3-WS2012R2", - "SQL2014SP2-WS2012R2" - ], - "metadata": { - "description": "Windows Server and SQL Offer" - } - }, - "sqlSku": { - "type": "String", - "defaultValue": "Standard", - "allowedValues": [ - "Standard", - "Enterprise", - "SQLDEV", - "Web", - "Express" - ], - "metadata": { - "description": "SQL Server Sku" - } - }, - "sqlDataDisksCount": { - "type": "int", - "defaultValue": 1, - "minValue": 1, - "maxValue": 8, - "metadata": { - "description": "Amount of data disks (1TB each) for SQL Data files" - } - }, - "sqlLogDisksCount": { - "type": "int", - "defaultValue": 1, - "minValue": 1, - "maxValue": 8, - "metadata": { - "description": "Amount of data disks (1TB each) for SQL Log files" - } - }, - "storageWorkloadType": { - "type": "String", - "defaultValue": "General", - "allowedValues": [ - "General", - "OLTP", - "DW" - ], - "metadata": { - "description": "SQL Server Workload Type" - } - }, - "dataPath": { - "type": "String", - "defaultValue": "F:\\SQLData", - "metadata": { - "description": "Path for SQL Data files. Please choose drive letter from F to Z, and other drives from A to E are reserved for system" - } - }, - "logPath": { - "type": "String", - "defaultValue": "G:\\SQLLog", - "metadata": { - "description": "Path for SQL Log files. 
Please choose drive letter from F to Z and different than the one used for SQL data. Drive letter from A to E are reserved for system" - } - }, - "vnet-name": { - "type": "string", - "defaultValue": "adsgofast-vnet", - "metadata": { - "description": "Name of Azure Bastion resource" - } - } - }, - "variables": { - "adf-ir-vm-name": "[take(concat('IR-Az-', uniqueString(resourceGroup().id),'-VM'),15)]", - "adf-ir-az-network-interface-name": "[concat(variables('adf-ir-vm-name'),'NetInt')]", - "adf-ir-onp-network-interface-name": "[concat(parameters('adf-ir-onp-vm-name'),'NetInt')]", - "dataDisks": { - "createOption": "empty", - "caching": "ReadOnly", - "writeAcceleratorEnabled": false, - "storageAccountType": "Premium_LRS", - "diskSizeGB": 1023 - }, - "diskConfigurationType": "NEW", - "dataDisksLuns": "[array(range(0 ,parameters('sqlDataDisksCount')))]", - "logDisksLuns": "[array(range(parameters('sqlDataDisksCount'), parameters('sqlLogDisksCount')))]", - "tempDbPath": "D:\\SQLTemp", - "data-subnet-name": "Data" - }, - "resources": [ - { - "type": "Microsoft.Network/networkInterfaces", - "apiVersion": "2019-09-01", - "name": "[variables('adf-ir-az-network-interface-name')]", - "location": "[parameters('location')]", - "dependsOn": [ - - ], - "tags": { - "displayName": "[variables('adf-ir-az-network-interface-name')]" - }, - "properties": { - "ipConfigurations": [ - { - "name": "ipConfig1", - "properties": { - "privateIPAllocationMethod": "Dynamic", - "subnet": { - "id": "[resourceId('Microsoft.Network/virtualNetworks/subnets', parameters('vnet-name'), variables('data-subnet-name'))]" - } - } - } - ] - } - }, - { - "type": "Microsoft.Network/networkInterfaces", - "apiVersion": "2019-09-01", - "name": "[variables('adf-ir-onp-network-interface-name')]", - "location": "[parameters('location')]", - "dependsOn": [ - - ], - "tags": { - "displayName": "[variables('adf-ir-onp-network-interface-name')]" - }, - "properties": { - "ipConfigurations": [ - { - "name": "ipConfig1", - 
"properties": { - "privateIPAllocationMethod": "Dynamic", - "subnet": { - "id": "[resourceId('Microsoft.Network/virtualNetworks/subnets', parameters('vnet-name'), variables('data-subnet-name'))]" - } - } - } - ] - } - }, - { - "type": "Microsoft.Compute/virtualMachines", - "apiVersion": "2019-07-01", - "name": "[variables('adf-ir-vm-name')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('adf-ir-az-network-interface-name'))]" - ], - "tags": { - "displayName": "[variables('adf-ir-vm-name')]" - }, - "properties": { - "hardwareProfile": { - "vmSize": "[parameters('adf-ir-vm-size')]" - }, - "osProfile": { - "computerName": "[variables('adf-ir-vm-name')]", - "adminUsername": "[parameters('adf-ir-vm-admin-username')]", - "adminPassword": "[parameters('adf-ir-vm-admin-password')]" - }, - "storageProfile": { - "imageReference": { - "publisher": "MicrosoftWindowsServer", - "offer": "WindowsServer", - "sku": "2019-Datacenter", - "version": "latest" - }, - "osDisk": { - "name": "[concat(variables('adf-ir-vm-name'),'OsDisk')]", - "caching": "ReadWrite", - "createOption": "FromImage", - "managedDisk": { - "storageAccountType": "[parameters('os-disk-type-adfir-vm')]" - }, - "diskSizeGB": 128 - } - }, - "networkProfile": { - "networkInterfaces": [ - { - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('adf-ir-az-network-interface-name'))]" - } - ] - } - } - }, - { - "type": "Microsoft.Compute/virtualMachines", - "apiVersion": "2019-07-01", - "name": "[parameters('adf-ir-onp-vm-name')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('Microsoft.Network/networkInterfaces', variables('adf-ir-onp-network-interface-name'))]" - ], - "tags": { - "displayName": "[parameters('adf-ir-onp-vm-name')]" - }, - "properties": { - "hardwareProfile": { - "vmSize": "[parameters('adf-ir-vm-size')]" - }, - "osProfile": { - "computerName": "[parameters('adf-ir-onp-vm-name')]", - 
"adminUsername": "[parameters('adf-ir-vm-admin-username')]", - "adminPassword": "[parameters('adf-ir-vm-admin-password')]", - "windowsConfiguration": { - "enableAutomaticUpdates": true, - "provisionVmAgent": true - } - }, - "storageProfile": { - "imageReference": { - "publisher": "MicrosoftSQLServer", - "offer": "[parameters('imageOffer')]", - "sku": "[parameters('sqlSku')]", - "version": "latest" - }, - "osDisk": { - "name": "[concat(parameters('adf-ir-onp-vm-name'),'OsDisk')]", - "caching": "ReadWrite", - "createOption": "FromImage", - "managedDisk": { - "storageAccountType": "[parameters('os-disk-type-adfir-vm')]" - }, - "diskSizeGB": 128 - }, - "copy": [ - { - "name": "dataDisks", - "count": "[add(parameters('sqlDataDisksCount'), parameters('sqlLogDisksCount'))]", - "input": { - "lun": "[copyIndex('dataDisks')]", - "createOption": "[variables('dataDisks').createOption]", - "caching": "[if(greaterOrEquals(copyIndex('dataDisks'), parameters('sqlDataDisksCount')) ,'None', variables('dataDisks').caching )]", - "writeAcceleratorEnabled": "[variables('dataDisks').writeAcceleratorEnabled]", - "diskSizeGB": "[variables('dataDisks').diskSizeGB]", - "managedDisk": { - "storageAccountType": "[variables('dataDisks').storageAccountType]" - } - } - } - ] - }, - "networkProfile": { - "networkInterfaces": [ - { - "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('adf-ir-onp-network-interface-name'))]" - } - ] - } - } - }, - { - "type": "Microsoft.SqlVirtualMachine/SqlVirtualMachines", - "apiVersion": "2017-03-01-preview", - "name": "[parameters('adf-ir-onp-vm-name')]", - "location": "[parameters('location')]", - "dependsOn": [ - "[resourceId('Microsoft.Compute/virtualMachines', parameters('adf-ir-onp-vm-name'))]" - ], - "properties": { - "virtualMachineResourceId": "[resourceId('Microsoft.Compute/virtualMachines', parameters('adf-ir-onp-vm-name'))]", - "sqlManagement": "Full", - "SqlServerLicenseType": "PAYG", - "StorageConfigurationSettings": { - 
"DiskConfigurationType": "[variables('diskConfigurationType')]", - "StorageWorkloadType": "[parameters('storageWorkloadType')]", - "SQLDataSettings": { - "LUNs": "[variables('dataDisksLUNs')]", - "DefaultFilePath": "[parameters('dataPath')]" - }, - "SQLLogSettings": { - "Luns": "[variables('logDisksLUNs')]", - "DefaultFilePath": "[parameters('logPath')]" - }, - "SQLTempDbSettings": { - "DefaultFilePath": "[variables('tempDbPath')]" - } - } - } - } - ], - "outputs": { - } -} diff --git a/solution/Deployment/arm/WebApp.bicep b/solution/Deployment/arm/WebApp.bicep deleted file mode 100644 index 63aad64f..00000000 --- a/solution/Deployment/arm/WebApp.bicep +++ /dev/null @@ -1,42 +0,0 @@ -@description('Location for all resources.') -param location string = resourceGroup().location - -@description('Resource Group.') -param resource_group_name string = '' - -@description('The name of Web Application.') -param sites_AdsGoFastWebApp_name string = 'adsgofastWebApp' - -@description('') -param appservice_name string = '' - -resource sites_AdsGoFastWebApp_name_resource 'Microsoft.Web/sites@2018-11-01' = { - name: sites_AdsGoFastWebApp_name - location: location - tags: {} - properties: { - name: sites_AdsGoFastWebApp_name - siteConfig: { - appSettings: [ - { - name: 'XDT_MicrosoftApplicationInsights_Mode' - value: 'default' - } - { - name: 'ANCM_ADDITIONAL_ERROR_PAGE_LINK' - value: 'https://${sites_AdsGoFastWebApp_name}.scm.azurewebsites.net/detectors?type=tools&name=eventviewer' - } - ] - metadata: [ - { - name: 'CURRENT_STACK' - value: 'dotnetcore' - } - ] - phpVersion: 'OFF' - alwaysOn: true - } - serverFarmId: resourceId('Microsoft.Web/serverfarms', appservice_name) - clientAffinityEnabled: true - } -} \ No newline at end of file diff --git a/solution/Deployment/arm/WebApp.json b/solution/Deployment/arm/WebApp.json deleted file mode 100644 index 6e0785c8..00000000 --- a/solution/Deployment/arm/WebApp.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "$schema": 
"https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", - "contentVersion": "1.0.0.0", - "metadata": { - "_generator": { - "name": "bicep", - "version": "0.4.613.9944", - "templateHash": "4770209531422889309" - } - }, - "parameters": { - "location": { - "type": "string", - "defaultValue": "[resourceGroup().location]", - "metadata": { - "description": "Location for all resources." - } - }, - "resource_group_name": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "Resource Group." - } - }, - "sites_AdsGoFastWebApp_name": { - "type": "string", - "defaultValue": "adsgofastWebApp", - "metadata": { - "description": "The name of Web Application." - } - }, - "appservice_name": { - "type": "string", - "defaultValue": "", - "metadata": { - "description": "" - } - } - }, - "functions": [], - "resources": [ - { - "type": "Microsoft.Web/sites", - "apiVersion": "2018-11-01", - "name": "[parameters('sites_AdsGoFastWebApp_name')]", - "location": "[parameters('location')]", - "tags": {}, - "properties": { - "name": "[parameters('sites_AdsGoFastWebApp_name')]", - "siteConfig": { - "appSettings": [ - { - "name": "XDT_MicrosoftApplicationInsights_Mode", - "value": "default" - }, - { - "name": "ANCM_ADDITIONAL_ERROR_PAGE_LINK", - "value": "[format('https://{0}.scm.azurewebsites.net/detectors?type=tools&name=eventviewer', parameters('sites_AdsGoFastWebApp_name'))]" - } - ], - "metadata": [ - { - "name": "CURRENT_STACK", - "value": "dotnetcore" - } - ], - "phpVersion": "OFF", - "alwaysOn": true - }, - "serverFarmId": "[resourceId('Microsoft.Web/serverfarms', parameters('appservice_name'))]", - "clientAffinityEnabled": true - } - } - ] -} \ No newline at end of file diff --git a/solution/Deployment/environments/EditSettings.html b/solution/Deployment/environments/EditSettings.html deleted file mode 100644 index 115a1718..00000000 --- a/solution/Deployment/environments/EditSettings.html +++ /dev/null @@ -1,67 +0,0 @@ - - - -Page Title 
- - - - - - - -

Azure Data Services Go Fast Settings Editor

- -
- - - - diff --git a/solution/Deployment/environments/Node/package-lock.json b/solution/Deployment/environments/Node/package-lock.json deleted file mode 100644 index efafaecb..00000000 --- a/solution/Deployment/environments/Node/package-lock.json +++ /dev/null @@ -1,872 +0,0 @@ -{ - "name": "ads_gofast_configuration_app", - "version": "1.0.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "ads_gofast_configuration_app", - "version": "1.0.0", - "dependencies": { - "express": "^4.16.1", - "fs": "0.0.2", - "node-static": "0.7.11", - "path": "0.12.7" - } - }, - "node_modules/accepts": { - "version": "1.3.7", - "license": "MIT", - "dependencies": { - "mime-types": "~2.1.24", - "negotiator": "0.6.2" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/array-flatten": { - "version": "1.1.1", - "license": "MIT" - }, - "node_modules/body-parser": { - "version": "1.19.0", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", - "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", - "dependencies": { - "bytes": "3.1.0", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "~1.1.2", - "http-errors": "1.7.2", - "iconv-lite": "0.4.24", - "on-finished": "~2.3.0", - "qs": "6.7.0", - "raw-body": "2.4.0", - "type-is": "~1.6.17" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/bytes": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", - "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/colors": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", - "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", - "engines": { - "node": ">=0.1.90" - } - }, - "node_modules/content-disposition": 
{ - "version": "0.5.3", - "license": "MIT", - "dependencies": { - "safe-buffer": "5.1.2" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/content-type": { - "version": "1.0.4", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/cookie": { - "version": "0.4.0", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/cookie-signature": { - "version": "1.0.6", - "license": "MIT" - }, - "node_modules/debug": { - "version": "2.6.9", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/depd": { - "version": "1.1.2", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/destroy": { - "version": "1.0.4", - "license": "MIT" - }, - "node_modules/ee-first": { - "version": "1.1.1", - "license": "MIT" - }, - "node_modules/encodeurl": { - "version": "1.0.2", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/escape-html": { - "version": "1.0.3", - "license": "MIT" - }, - "node_modules/etag": { - "version": "1.8.1", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/express": { - "version": "4.17.1", - "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", - "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==", - "dependencies": { - "accepts": "~1.3.7", - "array-flatten": "1.1.1", - "body-parser": "1.19.0", - "content-disposition": "0.5.3", - "content-type": "~1.0.4", - "cookie": "0.4.0", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "~1.1.2", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "~1.1.2", - "fresh": "0.5.2", - "merge-descriptors": "1.0.1", - "methods": "~1.1.2", - "on-finished": "~2.3.0", - "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", - "proxy-addr": "~2.0.5", - "qs": "6.7.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.1.2", - "send": "0.17.1", - 
"serve-static": "1.14.1", - "setprototypeof": "1.1.1", - "statuses": "~1.5.0", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" - }, - "engines": { - "node": ">= 0.10.0" - } - }, - "node_modules/finalhandler": { - "version": "1.1.2", - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "on-finished": "~2.3.0", - "parseurl": "~1.3.3", - "statuses": "~1.5.0", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/forwarded": { - "version": "0.2.0", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fresh": { - "version": "0.5.2", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fs": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.2.tgz", - "integrity": "sha1-4fJE7zkzwbKmS9R5kTYGDQ9ZFPg=" - }, - "node_modules/http-errors": { - "version": "1.7.2", - "license": "MIT", - "dependencies": { - "depd": "~1.1.2", - "inherits": "2.0.3", - "setprototypeof": "1.1.1", - "statuses": ">= 1.5.0 < 2", - "toidentifier": "1.0.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/inherits": { - "version": "2.0.3", - "license": "ISC" - }, - "node_modules/ipaddr.js": { - "version": "1.9.1", - "license": "MIT", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/media-typer": { - "version": "0.3.0", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/merge-descriptors": { - "version": "1.0.1", - "license": "MIT" - }, - "node_modules/methods": { - "version": "1.1.2", - "license": "MIT", - "engines": { - 
"node": ">= 0.6" - } - }, - "node_modules/mime": { - "version": "1.6.0", - "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/mime-db": { - "version": "1.51.0", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.34", - "license": "MIT", - "dependencies": { - "mime-db": "1.51.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/minimist": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", - "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=" - }, - "node_modules/ms": { - "version": "2.0.0", - "license": "MIT" - }, - "node_modules/negotiator": { - "version": "0.6.2", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/node-static": { - "version": "0.7.11", - "resolved": "https://registry.npmjs.org/node-static/-/node-static-0.7.11.tgz", - "integrity": "sha512-zfWC/gICcqb74D9ndyvxZWaI1jzcoHmf4UTHWQchBNuNMxdBLJMDiUgZ1tjGLEIe/BMhj2DxKD8HOuc2062pDQ==", - "dependencies": { - "colors": ">=0.6.0", - "mime": "^1.2.9", - "optimist": ">=0.3.4" - }, - "bin": { - "static": "bin/cli.js" - }, - "engines": { - "node": ">= 0.4.1" - } - }, - "node_modules/on-finished": { - "version": "2.3.0", - "license": "MIT", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/optimist": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", - "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", - "dependencies": { - "minimist": "~0.0.1", - "wordwrap": "~0.0.2" - } - }, - "node_modules/parseurl": { - "version": "1.3.3", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/path": { - "version": "0.12.7", - "resolved": "https://registry.npmjs.org/path/-/path-0.12.7.tgz", - "integrity": "sha1-1NwqUGxM4hl+tIHr/NWzbAFAsQ8=", - "dependencies": { - "process": "^0.11.1", - "util": 
"^0.10.3" - } - }, - "node_modules/path-to-regexp": { - "version": "0.1.7", - "license": "MIT" - }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=", - "engines": { - "node": ">= 0.6.0" - } - }, - "node_modules/proxy-addr": { - "version": "2.0.7", - "license": "MIT", - "dependencies": { - "forwarded": "0.2.0", - "ipaddr.js": "1.9.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/qs": { - "version": "6.7.0", - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/range-parser": { - "version": "1.2.1", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/raw-body": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", - "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", - "dependencies": { - "bytes": "3.1.0", - "http-errors": "1.7.2", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/safe-buffer": { - "version": "5.1.2", - "license": "MIT" - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - }, - "node_modules/send": { - "version": "0.17.1", - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "depd": "~1.1.2", - "destroy": "~1.0.4", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "~1.7.2", - "mime": "1.6.0", - "ms": "2.1.1", - "on-finished": "~2.3.0", - "range-parser": "~1.2.1", - "statuses": "~1.5.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/send/node_modules/ms": { - "version": "2.1.1", - "license": "MIT" - }, 
- "node_modules/serve-static": { - "version": "1.14.1", - "license": "MIT", - "dependencies": { - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.17.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/setprototypeof": { - "version": "1.1.1", - "license": "ISC" - }, - "node_modules/statuses": { - "version": "1.5.0", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/toidentifier": { - "version": "1.0.0", - "license": "MIT", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/type-is": { - "version": "1.6.18", - "license": "MIT", - "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/unpipe": { - "version": "1.0.0", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/util": { - "version": "0.10.4", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz", - "integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==", - "dependencies": { - "inherits": "2.0.3" - } - }, - "node_modules/utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=", - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/wordwrap": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", - "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", - "engines": { - "node": ">=0.4.0" - } - } - }, - "dependencies": { - "accepts": { - "version": "1.3.7", - "requires": { - "mime-types": "~2.1.24", - "negotiator": "0.6.2" - } - }, - "array-flatten": { - "version": "1.1.1" - }, - "body-parser": { 
- "version": "1.19.0", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", - "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", - "requires": { - "bytes": "3.1.0", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "~1.1.2", - "http-errors": "1.7.2", - "iconv-lite": "0.4.24", - "on-finished": "~2.3.0", - "qs": "6.7.0", - "raw-body": "2.4.0", - "type-is": "~1.6.17" - } - }, - "bytes": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", - "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" - }, - "colors": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", - "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==" - }, - "content-disposition": { - "version": "0.5.3", - "requires": { - "safe-buffer": "5.1.2" - } - }, - "content-type": { - "version": "1.0.4" - }, - "cookie": { - "version": "0.4.0" - }, - "cookie-signature": { - "version": "1.0.6" - }, - "debug": { - "version": "2.6.9", - "requires": { - "ms": "2.0.0" - } - }, - "depd": { - "version": "1.1.2" - }, - "destroy": { - "version": "1.0.4" - }, - "ee-first": { - "version": "1.1.1" - }, - "encodeurl": { - "version": "1.0.2" - }, - "escape-html": { - "version": "1.0.3" - }, - "etag": { - "version": "1.8.1" - }, - "express": { - "version": "4.17.1", - "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", - "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==", - "requires": { - "accepts": "~1.3.7", - "array-flatten": "1.1.1", - "body-parser": "1.19.0", - "content-disposition": "0.5.3", - "content-type": "~1.0.4", - "cookie": "0.4.0", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "~1.1.2", - "encodeurl": "~1.0.2", - "escape-html": 
"~1.0.3", - "etag": "~1.8.1", - "finalhandler": "~1.1.2", - "fresh": "0.5.2", - "merge-descriptors": "1.0.1", - "methods": "~1.1.2", - "on-finished": "~2.3.0", - "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", - "proxy-addr": "~2.0.5", - "qs": "6.7.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.1.2", - "send": "0.17.1", - "serve-static": "1.14.1", - "setprototypeof": "1.1.1", - "statuses": "~1.5.0", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" - } - }, - "finalhandler": { - "version": "1.1.2", - "requires": { - "debug": "2.6.9", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "on-finished": "~2.3.0", - "parseurl": "~1.3.3", - "statuses": "~1.5.0", - "unpipe": "~1.0.0" - } - }, - "forwarded": { - "version": "0.2.0" - }, - "fresh": { - "version": "0.5.2" - }, - "fs": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.2.tgz", - "integrity": "sha1-4fJE7zkzwbKmS9R5kTYGDQ9ZFPg=" - }, - "http-errors": { - "version": "1.7.2", - "requires": { - "depd": "~1.1.2", - "inherits": "2.0.3", - "setprototypeof": "1.1.1", - "statuses": ">= 1.5.0 < 2", - "toidentifier": "1.0.0" - } - }, - "iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "requires": { - "safer-buffer": ">= 2.1.2 < 3" - } - }, - "inherits": { - "version": "2.0.3" - }, - "ipaddr.js": { - "version": "1.9.1" - }, - "media-typer": { - "version": "0.3.0" - }, - "merge-descriptors": { - "version": "1.0.1" - }, - "methods": { - "version": "1.1.2" - }, - "mime": { - "version": "1.6.0" - }, - "mime-db": { - "version": "1.51.0" - }, - "mime-types": { - "version": "2.1.34", - "requires": { - "mime-db": "1.51.0" - } - }, - "minimist": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", - "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=" - 
}, - "ms": { - "version": "2.0.0" - }, - "negotiator": { - "version": "0.6.2" - }, - "node-static": { - "version": "0.7.11", - "resolved": "https://registry.npmjs.org/node-static/-/node-static-0.7.11.tgz", - "integrity": "sha512-zfWC/gICcqb74D9ndyvxZWaI1jzcoHmf4UTHWQchBNuNMxdBLJMDiUgZ1tjGLEIe/BMhj2DxKD8HOuc2062pDQ==", - "requires": { - "colors": ">=0.6.0", - "mime": "^1.2.9", - "optimist": ">=0.3.4" - } - }, - "on-finished": { - "version": "2.3.0", - "requires": { - "ee-first": "1.1.1" - } - }, - "optimist": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", - "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", - "requires": { - "minimist": "~0.0.1", - "wordwrap": "~0.0.2" - } - }, - "parseurl": { - "version": "1.3.3" - }, - "path": { - "version": "0.12.7", - "resolved": "https://registry.npmjs.org/path/-/path-0.12.7.tgz", - "integrity": "sha1-1NwqUGxM4hl+tIHr/NWzbAFAsQ8=", - "requires": { - "process": "^0.11.1", - "util": "^0.10.3" - } - }, - "path-to-regexp": { - "version": "0.1.7" - }, - "process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=" - }, - "proxy-addr": { - "version": "2.0.7", - "requires": { - "forwarded": "0.2.0", - "ipaddr.js": "1.9.1" - } - }, - "qs": { - "version": "6.7.0" - }, - "range-parser": { - "version": "1.2.1" - }, - "raw-body": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", - "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", - "requires": { - "bytes": "3.1.0", - "http-errors": "1.7.2", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - } - }, - "safe-buffer": { - "version": "5.1.2" - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": 
"sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - }, - "send": { - "version": "0.17.1", - "requires": { - "debug": "2.6.9", - "depd": "~1.1.2", - "destroy": "~1.0.4", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "~1.7.2", - "mime": "1.6.0", - "ms": "2.1.1", - "on-finished": "~2.3.0", - "range-parser": "~1.2.1", - "statuses": "~1.5.0" - }, - "dependencies": { - "ms": { - "version": "2.1.1" - } - } - }, - "serve-static": { - "version": "1.14.1", - "requires": { - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.17.1" - } - }, - "setprototypeof": { - "version": "1.1.1" - }, - "statuses": { - "version": "1.5.0" - }, - "toidentifier": { - "version": "1.0.0" - }, - "type-is": { - "version": "1.6.18", - "requires": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - } - }, - "unpipe": { - "version": "1.0.0" - }, - "util": { - "version": "0.10.4", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz", - "integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==", - "requires": { - "inherits": "2.0.3" - } - }, - "utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" - }, - "vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" - }, - "wordwrap": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", - "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=" - } - } -} diff --git a/solution/Deployment/environments/Node/package.json b/solution/Deployment/environments/Node/package.json deleted file mode 100644 index 4ee23d2b..00000000 --- a/solution/Deployment/environments/Node/package.json +++ /dev/null @@ -1,16 +0,0 @@ 
-{ - "name": "ads_gofast_configuration_app", - "version": "1.0.0", - "description": "ads_gofast_configuration_app", - "author": "john.rampono@microsoft.com", - "main": "server.js", - "scripts": { - "start": "node server.js" - }, - "dependencies": { - "express": "^4.16.1", - "node-static": "0.7.11", - "fs": "0.0.2", - "path": "0.12.7" - } - } \ No newline at end of file diff --git a/solution/Deployment/environments/Node/server.js b/solution/Deployment/environments/Node/server.js deleted file mode 100644 index eabee95e..00000000 --- a/solution/Deployment/environments/Node/server.js +++ /dev/null @@ -1,37 +0,0 @@ -var static = require('node-static'); -var http = require('http'); -// Import the path module -const path = require('path'); -var fs = require('fs'); - -var directory = path.resolve(__dirname + "/../"); -var file = new(static.Server)(directory, { cache: 1 }); - -http.createServer(function (req, res) { - - if (req.method === "GET") - { - file.serve(req, res); - } - else - { - var body = ''; - filePath = directory + '/development.json'; - req.on('data', function(data) { - body += data; - body = decodeURI(body); - body = JSON.parse(body); - body = JSON.stringify(body, null, 2); - //body = body.replace("\n", "\r\n"); - }); - - req.on('end', function (){ - fs.writeFile(filePath, body, function() { - res.end(); - }); - }); - } -}).listen(8080); - - - diff --git a/solution/Deployment/environments/development.json b/solution/Deployment/environments/development.json deleted file mode 100644 index b31918e9..00000000 --- a/solution/Deployment/environments/development.json +++ /dev/null @@ -1,197 +0,0 @@ -{ - "AdsOpts": { - "CI": { - "Enable": true, - "BuildFunctionApp": true, - "BuildWebApp": true, - "BuildAdsGoFastDatabase": true, - "BuildDataFactory": true - }, - "CD": { - "EnableDeploy": true, - "EnableConfigure": true, - "ResourceGroup": { - "Enable": true, - "Id": "/subscriptions/035a1364-f00d-48e2-b582-4fe125905ee3/resourceGroups/AdsTestNew", - "Subscription": 
"Jorampon Internal Consumption", - "Domain": "microsoft.com", - "TenantId": "72f988bf-86f1-41af-91ab-2d7cd011db47", - "Location": "australiaeast", - "Name": "AdsTestNew", - "AADUser": "jorampon@microsoft.com", - "Hash": "DummyValueToBeReplacedAtRuntime" - }, - "ServicePrincipals": { - "DeploymentSP": { - "Enable": true, - "Name": "AdsGoFastDeployer", - "ApplyNamePostFix": true - }, - "WebAppAuthenticationSP": { - "Enable": true, - "Name": "AdsGFWebAuthSP", - "ApplyNamePostFix": true, - "ClientId": "a6f5f6bf-25da-43c3-863e-df3588db7d56" - }, - "FunctionAppAuthenticationSP": { - "Enable": true, - "Name": "AdsGFFuncAppAuthSP", - "ApplyNamePostFix": true, - "ClientId": "584f2d03-a7e3-4fe8-9627-48f935697a68" - } - }, - "FolderPaths": { - "ArmTemplates": "./arm", - "Environments": "./../environments", - "PublishZip": "./../bin/publish/zipped", - "PublishUnZip": "./../bin/publish/unzipped" - }, - "AzureLoginOptions": { - "UseInteractiveAzCliLogin": true - }, - "EnviroInstalls": { - "PerformLocalInstalls": true, - "PerformLocalInstallsAzCli": false, - "PerformLocalInstallsAzCliAddToPath": true - }, - "ArmOptions": { - "PerformDeployment": false, - "PerformDeploymentStorageLogging": false, - "PerformDeploymentAppService": false - }, - "Services": { - "UseARMDefaults": false, - "AppInsights": { - "Enable": true, - "Name": "adsgfappin", - "ApplyNamePostFix": true - }, - "AppPlans": { - "WebApp": { - "Enable": true, - "Name": "adsgfappplanweb", - "ApplyNamePostFix": true, - "ResourceGroup": null - }, - "FunctionApp": { - "Enable": true, - "Name": "adsgfappplanfnc", - "ApplyNamePostFix": true, - "ResourceGroup": null - } - }, - "AzureSQLServer": { - "Enable": true, - "Name": "adsgfsvr", - "ApplyNamePostFix": true, - "AdminUser": "AdsAdmin", - "AdminPassword": "*********", - "AdsGoFastDB": { - "Enable": true, - "Name": "AdsGF", - "UpdateSourceAndTargetSystems": true, - "UpdateDataFactory": true, - "ApplyNamePostFix": false - }, - "StagingDB": { - "Enable": true, - "Name": 
"AdsGfStaging", - "ApplyNamePostFix": false - }, - "SampleDB": { - "Enable": true, - "Name": "AdsGfSample", - "ApplyNamePostFix": false - } - }, - "CoreFunctionApp": { - "Enable": true, - "Name": "adsgofastfunc", - "ApplyNamePostFix": true, - "PrincipalId": "" - }, - "DataFactory": { - "Enable": true, - "Name": "adsgfadf", - "ApplyNamePostFix": true, - "AzVnetIr": { - "Enable": true, - "Name": "IRA", - "Type": "ManagedVnet" - }, - "OnPremVnetIr": { - "Enable": false, - "Name": "IRB", - "Type": "SelfHosted", - "IrInstallConfig": { - "LocalDrive": "C:", - "LocalVMFolder": "ADFInstaller", - "IrDownloadURL": "https://download.microsoft.com/download/E/4/7/E4771905-1079-445B-8BF9-8A1A075D8A10/IntegrationRuntime_5.9.7900.1.msi", - "JDKDownloadURL": "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.12%2B7/OpenJDK11U-jdk_x64_windows_hotspot_11.0.12_7.msi", - "JDKInstallFolder": "c:\\Program Files\\Eclipse Foundation\\" - } - } - }, - "WebSite": { - "Enable": true, - "Name": "adsgofastweb", - "ApplyNamePostFix": true, - "PrincipalId": "" - }, - "LogAnalytics": { - "Enable": true, - "Name": "adsgofastloganalytics", - "ApplyNamePostFix": true - }, - "KeyVault": { - "Enable": true, - "Name": "adsgfkv", - "ApplyNamePostFix": true - }, - "Vnet": { - "Enable": false, - "Name": "AdsGoFastVnet", - "ApplyNamePostFix": true, - "vNetAddressRange": "10.3.0.0/16", - "BastionSubnetAddressRange": "10.3.1.0/27", - "DataSubnetAddressRange": "10.3.2.0/27", - "WebAppSubnetAddressRange": "10.3.3.0/27", - "FuncAppSubnetAddressRange": "10.3.4.0/27", - "BastionSubnetName": "AzureBastionSubnet", - "DataSubnetName": "Data", - "WebAppSubnetName": "WebApp", - "FuncAppSubnetName": "FuncApp" - }, - "Storage": { - "Logging": { - "Name": "logstg", - "ApplyNamePostFix": true, - "Enable": true, - "Dummy": "" - }, - "ADLS": { - "Name": "adls", - "ApplyNamePostFix": true, - "Enable": true - }, - "Blob": { - "Name": "blob", - "ApplyNamePostFix": true, - "Enable": true, - "ResourceId": 
"" - }, - "ADLSTransient": { - "Name": "adlstran", - "ApplyNamePostFix": true, - "Enable": false - } - }, - "Bastion": { - "Name": "adsgfbastion", - "ApplyNamePostFix": true, - "Enable": true - } - } - } - } -} diff --git a/solution/Deployment/environments/environment.schema.json b/solution/Deployment/environments/environment.schema.json deleted file mode 100644 index e85d3890..00000000 --- a/solution/Deployment/environments/environment.schema.json +++ /dev/null @@ -1,2076 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema", - "$id": "http://adsgofast.com/environment.schema.json", - "type": "object", - "title": "Environment Configuration for the ADS Go Fast Framework", - "description": "Environment Configuration for the ADS Go Fast Framework.", - "default": {}, - "required": [ - "AdsOpts" - ], - "properties": { - "AdsOpts": { - "$id": "#/properties/AdsOpts", - "type": "object", - "title": "Ads Go Fast Settings", - "description": "Primary, top-level configuration element for CICD in this project.", - "default": {}, - "required": [ - "CI", - "CD" - ], - "properties": { - "CI": { - "$id": "#/properties/AdsOpts/properties/CI", - "type": "object", - "title": "Continuous Integration Settings", - "description": "//CI (Continuous Integration - Set this section to true if you want the source binaries to be built out. In ordinary operation these should be true. 
Switches provided for ease of debugging only)", - "default": { - "Enable": true, - "BuildFunctionApp": true, - "BuildWebApp": true, - "BuildAdsGoFastDatabase": true, - "BuildDataFactory": true - }, - "required": [ - "Enable", - "BuildFunctionApp", - "BuildWebApp", - "BuildAdsGoFastDatabase", - "BuildDataFactory" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CI/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable All", - "default": true, - "enum": [ - true, - false - ], - "options": { - "infoText": "Set this section to true if you want the source binaries to be built out. In ordinary operation these should be true. " - } - }, - "BuildFunctionApp": { - "$id": "#/properties/AdsOpts/properties/CI/properties/BuildFunctionApp", - "type": "boolean", - "format": "checkbox", - "title": "BuildFunctionApp", - "options": { - "infoText": "Set this section to true if you want the source binaries for the Function Application to be built out. In ordinary operation these should be true." - }, - "default": true, - "enum": [ - true, - false - ] - }, - "BuildWebApp": { - "$id": "#/properties/AdsOpts/properties/CI/properties/BuildWebApp", - "type": "boolean", - "format": "checkbox", - "title": "BuildWebApp", - "options": { - "infoText": "Set this section to true if you want the source binaries for the Web Appilication to be built out. In ordinary operation these should be true." - }, - "default": true, - "enum": [ - true, - false - ] - }, - "BuildAdsGoFastDatabase": { - "$id": "#/properties/AdsOpts/properties/CI/properties/BuildAdsGoFastDatabase", - "type": "boolean", - "format": "checkbox", - "title": "BuildAdsGoFastDatabase", - "options": { - "infoText": "Set this section to true if you want the source binaries for the AdsGoFastDatabase to be built out. In ordinary operation these should be true." 
- }, - "default": true, - "enum": [ - true, - false - ] - }, - "BuildDataFactory": { - "$id": "#/properties/AdsOpts/properties/CI/properties/BuildDataFactory", - "type": "boolean", - "format": "checkbox", - "title": "DataFactory", - "options": { - "infoText": "Set this section to true if you want the source binaries for the AdsGoFastDatabase to be built out. In ordinary operation these should be true." - }, - "default": true, - "enum": [ - true, - false - ] - } - }, - "additionalProperties": false - }, - "CD": { - "$id": "#/properties/AdsOpts/properties/CD", - "type": "object", - "title": "Continuous Deployment Settings", - "description": "This configuration element controls continous deployment specifics", - "default": {}, - "required": [ - "EnableDeploy", - "EnableConfigure", - "ServicePrincipals", - "ResourceGroup", - "FolderPaths", - "AzureLoginOptions", - "EnviroInstalls", - "ArmOptions", - "Services" - ], - "properties": { - "EnableDeploy": { - "$id": "#/properties/AdsOpts/properties/CD/properties/EnableDeploy", - "type": "boolean", - "format": "checkbox", - "title": "Enable/Disable Service Deployment", - "options": { - "infoText": "Set to false if you want no continous deployment operations to occur." - }, - "default": true, - "enum": [ - true, - false - ] - }, - "EnableConfigure": { - "$id": "#/properties/AdsOpts/properties/CD/properties/EnableConfigure", - "type": "boolean", - "format": "checkbox", - "title": "Enable/Disable Post Deployment Configuration", - "options": { - "infoText": "Set to false if you don't want post deployment configuration activities to occur." 
- }, - "default": true, - "enum": [ - true, - false - ] - }, - "ResourceGroup": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ResourceGroup", - "type": "object", - "title": "Primary Resource Group Settings", - "options": { - "infoText": "" - }, - "default": {}, - "required": [ - "Id", - "Enable", - "Subscription", - "Domain", - "TenantId", - "Location", - "Name", - "AADUser", - "Hash" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ResourceGroup/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Resource Group Deployment", - "options": { - "infoText": "Enable or Disable the Deployment of the Resource Group" - }, - "default": true, - "enum": [ - true, - false - ] - }, - "Id": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ResourceGroup/properties/Id", - "type": "string", - "title": "Resource Group ID", - "options": { - "infoText": "", - "inputAttributes": { - "placeholder": "eg. /subscriptions/92f988bf-86f1-41af-91ab-2d7cd011db48/resourceGroups/AdsTestNew" - } - }, - "default": "", - "examples": [ - "/subscriptions/92f988bf-86f1-41af-91ab-2d7cd011db48/resourceGroups/AdsTestNew" - ] - }, - "Subscription": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ResourceGroup/properties/Subscription", - "type": "string", - "title": "Subscription Name", - "options": { - "infoText": "name of the subscription which will host the deployment.", - "inputAttributes": { - "placeholder": "eg. Ads Go Fast Demo Subscription" - } - }, - "default": "", - "examples": [ - "Ads Go Fast Demo Subscription" - ] - }, - "Domain": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ResourceGroup/properties/Domain", - "type": "string", - "title": "Domain ", - "options": { - "infoText": "Azure Active Directory Domain Name to use for the deployment.", - "inputAttributes": { - "placeholder": "eg. 
adventureworks.com" - } - }, - "default": "", - "examples": [ - "adventureworks.com" - ] - }, - "TenantId": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ResourceGroup/properties/TenantId", - "type": "string", - "title": "TenantId", - "options": { - "infoText": "tenant Azure Subscription Tenant Id to be used for deployment", - "inputAttributes": { - "placeholder": "eg. 82f988bf-86f1-41af-91ab-2d7cd011db48" - } - }, - "default": "", - "examples": [ - "82f988bf-86f1-41af-91ab-2d7cd011db48" - ] - }, - "Location": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ResourceGroup/properties/Location", - "type": "string", - "title": "Azure Region", - "options": { - "infoText": "Azure Region to deploy into.", - "inputAttributes": { - "placeholder": "eg. australiaeast" - } - }, - "default": "australiaeast", - "enum": [ - "australiaeast", - "australiasoutheast" - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ResourceGroup/properties/Name", - "type": "string", - "title": "Resource Group Name", - "options": { - "infoText": "Name of the Resource Group to use for deployment.", - "inputAttributes": { - "placeholder": "eg. AdsTest" - } - } - }, - "AADUser": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ResourceGroup/properties/AADUser", - "type": "string", - "title": "AADUser", - "options": { - "infoText": "", - "inputAttributes": { - "placeholder": "eg. admin@advetureworks.com" - } - }, - "default": "", - "examples": [ - "jorampon@microsoft.com" - ] - }, - "Hash": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ResourceGroup/properties/Hash", - "type": "string", - "title": "Hash", - "description": "This is the used by the deployment scripts to store a hash value that is optionally postfixed to resource names to ensure uniqueness. 
Leave this item bank.", - "default": "", - "options": { - "inputAttributes": { - "placeholder": "LEAVE THIS BLANK" - } - }, - "examples": [ - "DummyValueToBeReplacedAtRuntime" - ] - } - }, - "additionalProperties": true - }, - "ServicePrincipals": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals", - "type": "object", - "title": "Service Principal Options", - "description": "Controls the specifics of the Service Principals and Azure App Registrations used.", - "default": { - "DeploymentSP": { - "Enable": true, - "Name": "AdsGFDeploySP", - "ApplyNamePostFix": true - }, - "WebAppAuthenticationSP": { - "Enable": true, - "Name": "AdsGFWebAuthSP", - "ApplyNamePostFix": true, - "ClientId": "#######" - }, - "FunctionAppAuthenticationSP": { - "Enable": true, - "Name": "AdsGFFuncAppAuthSP", - "ApplyNamePostFix": true, - "ClientId": "########" - } - }, - "options": { - "collapsed": true - }, - "properties": { - "DeploymentSP": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/DeploymentSP", - "type": "object", - "title": "DeploymentSP", - "description": "Set enable to true if you want the script to create the Deployment SP for you othwerise you need to provide the SP details in Secrets.json or if depoloying via GitHub Actions add in github secrets - https://github.com/Azure/actions-workflow-samples/blob/master/assets/create-secrets-for-GitHub-workflows.md#set-secret-with-azure-credentials", - "default": { - "Enable": true, - "Name": "AdsGFDeploySP", - "ApplyNamePostFix": true - }, - "required": [ - "Enable", - "Name", - "ApplyNamePostFix" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/DeploymentSP/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "Set enable to true if you want the script to create the Deployment SP for you othwerise you need to provide the SP details in 
Secrets.json or if depoloying via GitHub Actions add in github secrets - https://github.com/Azure/actions-workflow-samples/blob/master/assets/create-secrets-for-GitHub-workflows.md#set-secret-with-azure-credentials", - "default": true, - "examples": [ - true, - false - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/DeploymentSP/properties/Name", - "type": "string", - "title": "Name", - "description": "Name of the Service Principal", - "default": "", - "examples": [ - "AdsGFDeploySP" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/DeploymentSP/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "If true then the script will add a hash based on the resource group name as a postfix to the objects name", - "default": true, - "examples": [ - true, - false - ] - } - }, - "additionalProperties": true - }, - "WebAppAuthenticationSP": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/WebAppAuthenticationSP", - "type": "object", - "title": "WebAppAuthenticationSP", - "description": "", - "default": { - "Enable": true, - "Name": "AdsGFWebAuthSP", - "ApplyNamePostFix": true, - "ClientId": "###################" - }, - "required": [ - "Enable", - "Name", - "ApplyNamePostFix", - "ClientId" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/WebAppAuthenticationSP/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "Set to true if you want the script to create the App Registration Used by the Web App to validate AAD authentication requests.", - "default": true, - "examples": [ - true, - false - ] - }, - "Name": { - "$id": 
"#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/WebAppAuthenticationSP/properties/Name", - "type": "string", - "title": "Name", - "description": "Name of the App Registration", - "default": "AdsGFWebAuthSP", - "examples": [ - "AdsGFWebAuthSP" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/WebAppAuthenticationSP/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "If true then the script will add a hash based on the resource group name as a postfix to the objects name", - "default": true, - "examples": [ - true, - false - ] - }, - "ClientId": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/WebAppAuthenticationSP/properties/ClientId", - "type": "string", - "title": "ClientId", - "description": "client id of the App Registration. If you have enable set to true then ignore this as your script will autopopulate once it creates the App Reg", - "default": "", - "examples": [ - "904a7841-c19d-4c38-bb7e-ccd78e49a55b" - ] - } - }, - "additionalProperties": false - }, - "FunctionAppAuthenticationSP": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/FunctionAppAuthenticationSP", - "type": "object", - "title": "FunctionAppAuthenticationSP", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "AdsGFFuncAppAuthSP", - "ApplyNamePostFix": true, - "ClientId": "835b082c-6a63-4fcc-b5d8-005dba169819" - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix", - "ClientId" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/FunctionAppAuthenticationSP/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - 
"Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/FunctionAppAuthenticationSP/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "AdsGFFuncAppAuthSP" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/FunctionAppAuthenticationSP/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "ClientId": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ServicePrincipals/properties/FunctionAppAuthenticationSP/properties/ClientId", - "type": "string", - "title": "ClientId", - "description": "", - "default": "", - "examples": [ - "835b082c-6a63-4fcc-b5d8-005dba169819" - ] - } - }, - "additionalProperties": true - } - }, - "additionalProperties": true - }, - "FolderPaths": { - "$id": "#/properties/AdsOpts/properties/CD/properties/FolderPaths", - "type": "object", - "title": "Folder Path Options", - "description": "Controls the various folder addresses that the deployment scripts use.", - "default": {}, - "options": { - "collapsed": true - }, - "examples": [ - { - "ArmTemplates": "./arm", - "Environments": "./../environments", - "PublishZip": "./../bin/publish/zipped", - "PublishUnZip": "./../bin/publish/unzipped" - } - ], - "required": [ - "ArmTemplates", - "Environments", - "PublishZip", - "PublishUnZip" - ], - "properties": { - "ArmTemplates": { - "$id": "#/properties/AdsOpts/properties/CD/properties/FolderPaths/properties/ArmTemplates", - "type": "string", - "title": "ArmTemplates", - "description": "", - "default": "", - "examples": [ - "./arm" - ] - }, - "Environments": { - "$id": "#/properties/AdsOpts/properties/CD/properties/FolderPaths/properties/Environments", - "type": "string", - "title": "Environments", - "description": "", - "default": "", - 
"examples": [ - "./../environments" - ] - }, - "PublishZip": { - "$id": "#/properties/AdsOpts/properties/CD/properties/FolderPaths/properties/PublishZip", - "type": "string", - "title": "PublishZip", - "description": "", - "default": "", - "examples": [ - "./../bin/publish/zipped" - ] - }, - "PublishUnZip": { - "$id": "#/properties/AdsOpts/properties/CD/properties/FolderPaths/properties/PublishUnZip", - "type": "string", - "title": "PublishUnZip", - "description": "", - "default": "", - "examples": [ - "./../bin/publish/unzipped" - ] - } - }, - "additionalProperties": true - }, - "AzureLoginOptions": { - "$id": "#/properties/AdsOpts/properties/CD/properties/AzureLoginOptions", - "type": "object", - "title": "AzureLoginOptions", - "description": "Azure Login Options.", - "default": {}, - "options": { - "collapsed": true - }, - "examples": [ - { - "UseInteractiveAzCliLogin": true - } - ], - "required": [ - "UseInteractiveAzCliLogin" - ], - "properties": { - "UseInteractiveAzCliLogin": { - "$id": "#/properties/AdsOpts/properties/CD/properties/AzureLoginOptions/properties/UseInteractiveAzCliLogin", - "type": "boolean", - "format": "checkbox", - "title": "UseInteractiveAzCliLogin", - "description": "", - "default": false, - "examples": [ - true - ] - } - }, - "additionalProperties": true - }, - "EnviroInstalls": { - "$id": "#/properties/AdsOpts/properties/CD/properties/EnviroInstalls", - "type": "object", - "title": "Control Local Environment Installs", - "description": "", - "default": {}, - "options": { - "collapsed": true - }, - "examples": [ - { - "PerformLocalInstalls": true, - "PerformLocalInstallsAzCli": false, - "PerformLocalInstallsAzCliAddToPath": true - } - ], - "required": [ - "PerformLocalInstalls", - "PerformLocalInstallsAzCli", - "PerformLocalInstallsAzCliAddToPath" - ], - "properties": { - "PerformLocalInstalls": { - "$id": "#/properties/AdsOpts/properties/CD/properties/EnviroInstalls/properties/PerformLocalInstalls", - "type": "boolean", - "format": 
"checkbox", - "title": "PerformLocalInstalls", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "PerformLocalInstallsAzCli": { - "$id": "#/properties/AdsOpts/properties/CD/properties/EnviroInstalls/properties/PerformLocalInstallsAzCli", - "type": "boolean", - "format": "checkbox", - "title": "PerformLocalInstallsAzCli", - "description": "", - "default": false, - "examples": [ - false - ] - }, - "PerformLocalInstallsAzCliAddToPath": { - "$id": "#/properties/AdsOpts/properties/CD/properties/EnviroInstalls/properties/PerformLocalInstallsAzCliAddToPath", - "type": "boolean", - "format": "checkbox", - "title": "PerformLocalInstallsAzCliAddToPath", - "description": "", - "default": false, - "examples": [ - true - ] - } - }, - "additionalProperties": true - }, - "ArmOptions": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ArmOptions", - "type": "object", - "title": "Arm Deployment Options", - "description": "", - "default": {}, - "options": { - "collapsed": true - }, - "examples": [ - { - "PerformDeployment": false, - "PerformDeploymentStorageLogging": false, - "PerformDeploymentAppService": false - } - ], - "required": [ - "PerformDeployment", - "PerformDeploymentStorageLogging", - "PerformDeploymentAppService" - ], - "properties": { - "PerformDeployment": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ArmOptions/properties/PerformDeployment", - "type": "boolean", - "format": "checkbox", - "title": "PerformDeployment", - "description": "", - "default": false, - "examples": [ - false - ] - }, - "PerformDeploymentStorageLogging": { - "$id": "#/properties/AdsOpts/properties/CD/properties/ArmOptions/properties/PerformDeploymentStorageLogging", - "type": "boolean", - "format": "checkbox", - "title": "PerformDeploymentStorageLogging", - "description": "", - "default": false, - "examples": [ - false - ] - }, - "PerformDeploymentAppService": { - "$id": 
"#/properties/AdsOpts/properties/CD/properties/ArmOptions/properties/PerformDeploymentAppService", - "type": "boolean", - "format": "checkbox", - "title": "PerformDeploymentAppService", - "description": "", - "default": false, - "examples": [ - false - ] - } - }, - "additionalProperties": true - }, - "Services": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services", - "type": "object", - "title": "Azure Service Options", - "description": "This configuration element controls the specifics of the Azure services deployed.", - "default": {}, - "options": { - "collapsed": true - }, - "required": [ - "UseARMDefaults", - "AppInsights", - "AppPlans", - "AzureSQLServer", - "CoreFunctionApp", - "DataFactory", - "WebSite", - "LogAnalytics", - "KeyVault", - "Vnet", - "Storage" - ], - "properties": { - "UseARMDefaults": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/UseARMDefaults", - "type": "boolean", - "format": "checkbox", - "title": "UseARMDefaults", - "description": "", - "default": false, - "examples": [ - false - ] - }, - "AppInsights": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppInsights", - "type": "object", - "title": "AppInsights", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "adsgfappin", - "ApplyNamePostFix": true - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppInsights/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppInsights/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "adsgfappin" - ] - }, - "ApplyNamePostFix": { - "$id": 
"#/properties/AdsOpts/properties/CD/properties/Services/properties/AppInsights/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - } - }, - "additionalProperties": true - }, - "AppPlans": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans", - "type": "object", - "title": "AppPlans", - "description": "", - "default": {}, - "examples": [ - { - "WebApp": { - "Enable": true, - "Name": "adsgfappplanweb", - "ApplyNamePostFix": true, - "ResourceGroup": null - }, - "FunctionApp": { - "Enable": true, - "Name": "adsgfappplanfnc", - "ApplyNamePostFix": true, - "ResourceGroup": null - } - } - ], - "required": [ - "WebApp", - "FunctionApp" - ], - "properties": { - "WebApp": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans/properties/WebApp", - "type": "object", - "title": "WebApp", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "adsgfappplanweb", - "ApplyNamePostFix": true, - "ResourceGroup": null - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix", - "ResourceGroup" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans/properties/WebApp/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans/properties/WebApp/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "adsgfappplanweb" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans/properties/WebApp/properties/ApplyNamePostFix", - "type": "boolean", - "format": 
"checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "ResourceGroup": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans/properties/WebApp/properties/ResourceGroup", - "type": "null", - "title": "ResourceGroup", - "description": "", - "default": null, - "examples": [ - null - ] - } - }, - "additionalProperties": true - }, - "FunctionApp": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans/properties/FunctionApp", - "type": "object", - "title": "FunctionApp", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "adsgfappplanfnc", - "ApplyNamePostFix": true, - "ResourceGroup": null - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix", - "ResourceGroup" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans/properties/FunctionApp/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans/properties/FunctionApp/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "adsgfappplanfnc" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans/properties/FunctionApp/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "ResourceGroup": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AppPlans/properties/FunctionApp/properties/ResourceGroup", - "type": "null", - "title": "ResourceGroup", - "description": "", - "default": null, 
- "examples": [ - null - ] - } - }, - "additionalProperties": true - } - }, - "additionalProperties": true - }, - "AzureSQLServer": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer", - "type": "object", - "title": "AzureSQLServer", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "adsgfazsql", - "ApplyNamePostFix": true, - "AdminUser": "AdsAdmin", - "AdminPassword": "EPuiX2K0^T4t", - "AdsGoFastDB": { - "Enable": true, - "Name": "AdsGf", - "UpdateSourceAndTargetSystems": true, - "UpdateDataFactory": true, - "ApplyNamePostFix": false - }, - "StagingDB": { - "Enable": true, - "Name": "AdsGfStaging", - "ApplyNamePostFix": false - }, - "SampleDB": { - "Enable": true, - "Name": "AdsGfSample", - "ApplyNamePostFix": false - } - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix", - "AdminUser", - "AdminPassword", - "AdsGoFastDB", - "StagingDB", - "SampleDB" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "adsgfazsql" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "AdminUser": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/AdminUser", - "type": "string", - "title": "AdminUser", - "description": "", - 
"default": "", - "examples": [ - "AdsAdmin" - ] - }, - "AdminPassword": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/AdminPassword", - "type": "string", - "title": "AdminPassword", - "description": "", - "default": "", - "examples": [ - "EPuiX2K0^T4t" - ] - }, - "AdsGoFastDB": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/AdsGoFastDB", - "type": "object", - "title": "AdsGoFastDB", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "AdsGf", - "UpdateSourceAndTargetSystems": true, - "UpdateDataFactory": true, - "ApplyNamePostFix": false - } - ], - "required": [ - "Enable", - "Name", - "UpdateSourceAndTargetSystems", - "UpdateDataFactory", - "ApplyNamePostFix" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/AdsGoFastDB/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/AdsGoFastDB/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "AdsGf" - ] - }, - "UpdateSourceAndTargetSystems": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/AdsGoFastDB/properties/UpdateSourceAndTargetSystems", - "type": "boolean", - "format": "checkbox", - "title": "UpdateSourceAndTargetSystems", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "UpdateDataFactory": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/AdsGoFastDB/properties/UpdateDataFactory", - "type": "boolean", - "format": "checkbox", - "title": "UpdateDataFactory", - 
"description": "", - "default": false, - "examples": [ - true - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/AdsGoFastDB/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - false - ] - } - }, - "additionalProperties": true - }, - "StagingDB": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/StagingDB", - "type": "object", - "title": "StagingDB", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "AdsGfStaging", - "ApplyNamePostFix": false - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/StagingDB/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/StagingDB/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "AdsGfStaging" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/StagingDB/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - false - ] - } - }, - "additionalProperties": true - }, - "SampleDB": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/SampleDB", - "type": "object", - "title": "SampleDB", - "description": "", - "default": {}, - "examples": [ - { - 
"Enable": true, - "Name": "AdsGfSample", - "ApplyNamePostFix": false - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/SampleDB/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/SampleDB/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "AdsGfSample" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/AzureSQLServer/properties/SampleDB/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - false - ] - } - }, - "additionalProperties": true - } - }, - "additionalProperties": true - }, - "CoreFunctionApp": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/CoreFunctionApp", - "type": "object", - "title": "CoreFunctionApp", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "adsgofastfunc", - "ApplyNamePostFix": true, - "PrincipalId": "" - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix", - "PrincipalId" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/CoreFunctionApp/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/CoreFunctionApp/properties/Name", - "type": "string", - "title": "Name", - 
"description": "", - "default": "", - "examples": [ - "adsgofastfunc" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/CoreFunctionApp/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "PrincipalId": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/CoreFunctionApp/properties/PrincipalId", - "type": "string", - "title": "PrincipalId", - "description": "", - "default": "", - "examples": [ - "" - ] - } - }, - "additionalProperties": true - }, - "DataFactory": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory", - "type": "object", - "title": "DataFactory", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "adsgfadf", - "ApplyNamePostFix": true, - "AzVnetIr": { - "Enable": true, - "Name": "SelfHostedIntegrationRuntime-Azure-VNET", - "Type": "ManagedVnet" - }, - "OnPremVnetIr": { - "Enable": false, - "Name": "SelfHostedIntegrationRuntime-OnPem-Net", - "Type": "ManagedVnet" - } - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix", - "AzVnetIr", - "OnPremVnetIr" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "adsgfadf" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/ApplyNamePostFix", - "type": "boolean", - "format": 
"checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "AzVnetIr": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/AzVnetIr", - "type": "object", - "title": "AzVnetIr", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "SelfHostedIntegrationRuntime-Azure-VNET", - "Type": "ManagedVnet" - } - ], - "required": [ - "Enable", - "Name", - "Type" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/AzVnetIr/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/AzVnetIr/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "SelfHostedIntegrationRuntime-Azure-VNET" - ] - }, - "Type": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/AzVnetIr/properties/Type", - "type": "string", - "title": "Type", - "description": "", - "default": "", - "examples": [ - "ManagedVnet" - ] - } - }, - "additionalProperties": true - }, - "OnPremVnetIr": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/OnPremVnetIr", - "type": "object", - "title": "OnPremVnetIr", - "description": "", - "default": {}, - "examples": [ - { - "Enable": false, - "Name": "SelfHostedIntegrationRuntime-OnPem-Net", - "Type": "ManagedVnet" - } - ], - "required": [ - "Enable", - "Name", - "Type" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/OnPremVnetIr/properties/Enable", - "type": "boolean", - "format": 
"checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - false - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/OnPremVnetIr/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "SelfHostedIntegrationRuntime-OnPem-Net" - ] - }, - "Type": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/DataFactory/properties/OnPremVnetIr/properties/Type", - "type": "string", - "title": "Type", - "description": "", - "default": "", - "examples": [ - "ManagedVnet" - ] - } - }, - "additionalProperties": true - } - }, - "additionalProperties": true - }, - "WebSite": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/WebSite", - "type": "object", - "title": "WebSite", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "adsgofastweb", - "ApplyNamePostFix": true, - "PrincipalId": "" - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix", - "PrincipalId" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/WebSite/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/WebSite/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "adsgofastweb" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/WebSite/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "PrincipalId": { - "$id": 
"#/properties/AdsOpts/properties/CD/properties/Services/properties/WebSite/properties/PrincipalId", - "type": "string", - "title": "PrincipalId", - "description": "", - "default": "", - "examples": [ - "" - ] - } - }, - "additionalProperties": true - }, - "LogAnalytics": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/LogAnalytics", - "type": "object", - "title": "LogAnalytics", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "adsgofastloganalytics", - "ApplyNamePostFix": false - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/LogAnalytics/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/LogAnalytics/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "adsgofastloganalytics" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/LogAnalytics/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - false - ] - } - }, - "additionalProperties": true - }, - "KeyVault": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/KeyVault", - "type": "object", - "title": "KeyVault", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "adsgfkv", - "ApplyNamePostFix": true - } - ], - "required": [ - "Enable", - "Name", - "ApplyNamePostFix" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/KeyVault/properties/Enable", - "type": 
"boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/KeyVault/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "adsgfkv" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/KeyVault/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - } - }, - "additionalProperties": true - }, - "Vnet": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Vnet", - "type": "object", - "title": "Vnet", - "description": "", - "default": {}, - "examples": [ - { - "Enable": true, - "Name": "AdsGoFastVnet" - } - ], - "required": [ - "Enable", - "Name" - ], - "properties": { - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Vnet/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Vnet/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "options": { - "inputAttributes": { - "placeholder": "eg. 
AdsGoFastVnet" - } - }, - "examples": [ - "AdsGoFastVnet" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Vnet/properties/Logging/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - } - }, - "additionalProperties": true - }, - "Storage": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage", - "type": "object", - "title": "Storage", - "description": "", - "default": {}, - "examples": [ - { - "Logging": { - "Name": "logstg", - "ApplyNamePostFix": true, - "Enable": true, - "Dummy": "" - }, - "ADLS": { - "Name": "adls", - "ApplyNamePostFix": true, - "Enable": true - }, - "Blob": { - "Name": "blob", - "ApplyNamePostFix": true, - "Enable": true, - "ResourceId": "" - } - } - ], - "required": [ - "Logging", - "ADLS", - "Blob" - ], - "properties": { - "Logging": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/Logging", - "type": "object", - "title": "Logging", - "description": "", - "default": {}, - "examples": [ - { - "Name": "logstg", - "ApplyNamePostFix": true, - "Enable": true, - "Dummy": "" - } - ], - "required": [ - "Name", - "ApplyNamePostFix", - "Enable", - "Dummy" - ], - "properties": { - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/Logging/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "logstg" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/Logging/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Enable": { - "$id": 
"#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/Logging/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Dummy": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/Logging/properties/Dummy", - "type": "string", - "title": "Dummy", - "description": "", - "default": "", - "examples": [ - "" - ] - } - }, - "additionalProperties": true - }, - "ADLS": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/ADLS", - "type": "object", - "title": "ADLS", - "description": "", - "default": {}, - "examples": [ - { - "Name": "adls", - "ApplyNamePostFix": true, - "Enable": true - } - ], - "required": [ - "Name", - "ApplyNamePostFix", - "Enable" - ], - "properties": { - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/ADLS/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "adls" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/ADLS/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/ADLS/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - } - }, - "additionalProperties": true - }, - "Blob": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/Blob", - "type": "object", - "title": "Blob", - "description": "", - "default": {}, - "examples": 
[ - { - "Name": "blob", - "ApplyNamePostFix": true, - "Enable": true, - "ResourceId": "" - } - ], - "required": [ - "Name", - "ApplyNamePostFix", - "Enable", - "ResourceId" - ], - "properties": { - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/Blob/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "blob" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/Blob/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/Blob/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "ResourceId": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/Blob/properties/ResourceId", - "type": "string", - "title": "ResourceId", - "description": "", - "default": "", - "examples": [ - "" - ] - } - }, - "additionalProperties": true - }, - "ADLSTransient": { - "type": "object", - "title": "ADLS Transient", - "description": "", - "default": {}, - "required": [ - "Name", - "ApplyNamePostFix", - "Enable" - ], - "properties": { - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/ADLSTransient/properties/Name", - "type": "string", - "title": "Name", - "description": "", - "default": "", - "examples": [ - "adls" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/ADLSTransient/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "Apply 
Postfix to Resource Name", - "description": "", - "default": false, - "examples": [ - true - ] - }, - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Storage/properties/ADLSTransient/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable Deployment", - "description": "", - "default": false, - "examples": [ - true - ] - } - }, - "additionalProperties": true - } - }, - "additionalProperties": true - }, - "Bastion": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Bastion", - "type": "object", - "title": "Bastion Settings", - "description": "", - "default": {}, - "required": [ - "Name", - "ApplyNamePostFix", - "Enable" - ], - "properties": { - "Name": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Bastion/properties/Name", - "type": "string", - "title": "Bastion Name", - "description": "", - "default": "", - "examples": [ - "logstg" - ] - }, - "ApplyNamePostFix": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Bastion/properties/ApplyNamePostFix", - "type": "boolean", - "format": "checkbox", - "title": "ApplyNamePostFix", - "description": "", - "default": true, - "enum": [ - true - ] - }, - "Enable": { - "$id": "#/properties/AdsOpts/properties/CD/properties/Services/properties/Bastion/properties/Enable", - "type": "boolean", - "format": "checkbox", - "title": "Enable deployment of Bastion", - "description": "", - "default": true, - "enum": [ - true - ] - } - } - }, - "additionalProperties": true - } - }, - "additionalProperties": true - } - }, - "additionalProperties": false - } - }, - "additionalProperties": false - } -} \ No newline at end of file diff --git a/solution/Deployment/environments/test.ps1 b/solution/Deployment/environments/test.ps1 deleted file mode 100644 index 2409a0f2..00000000 --- a/solution/Deployment/environments/test.ps1 +++ /dev/null @@ -1,25 +0,0 @@ -$environment = Get-Content 
development.json | ConvertFrom-Json -Depth 10 - -foreach ($prop in $environment | Get-Member | Where-Object {$_.MemberType -eq 'NoteProperty'} ) -{ - #Write-Host $prop.Definition.ty - $property = $prop.Name - $value = $environment.$property - Write-Host $value.GetType() - if($value.GetType().Name -eq "String") - { - Write-Host "cat ../terraform_layer2/staging/vars/terragrunt.hcl | hclq set inputs.$property ""$value""" - } - else - { - if($value.GetType().Name -eq "Boolean") - { - Write-Host "cat ../terraform_layer2/staging/vars/terragrunt.hcl | hclq set inputs.$property" + $value.ToString().ToLower() - } - else - { - Write-Host "cat ../terraform_layer2/staging/vars/terragrunt.hcl | hclq set inputs.$property $value" - } - } - -} diff --git a/solution/Deployment/workflows/CD_0a_CreateServicePrincipals_AAD_Elevated.ps1 b/solution/Deployment/workflows/CD_0a_CreateServicePrincipals_AAD_Elevated.ps1 deleted file mode 100644 index b2a4b69c..00000000 --- a/solution/Deployment/workflows/CD_0a_CreateServicePrincipals_AAD_Elevated.ps1 +++ /dev/null @@ -1,82 +0,0 @@ -$error.clear() -#First Create the Resource Group -Invoke-Expression -Command ".\Steps\CD_DeployResourceGroup.ps1" -Import-Module .\Functions\Helpers.psm1 -######################################################################## - -### SetUp Service Principals Required.. Need to run this part with elevated privileges - -######################################################################### -if($env:AdsOpts_CD_ServicePrincipals_DeploymentSP_Enable -eq "True") -{ - Write-Debug "Creating Deployment Service Principal" - $subid = ((az account show -s $env:AdsOpts_CD_ResourceGroup_Subscription) | ConvertFrom-Json ).id - - $spcheck = az ad sp list --filter "displayname eq '$env:AdsOpts_CD_ServicePrincipals_DeploymentSP_Name'" | ConvertFrom-Json - if ($null -eq $spcheck) - { - Write-Debug "Deployment Principal does not exist so creating now." 
- $SP = az ad sp create-for-rbac --name $env:AdsOpts_CD_ServicePrincipals_DeploymentSP_Name --role contributor --scopes /subscriptions/$subid/resourceGroups/$env:AdsOpts_CD_ResourceGroup_Name | ConvertFrom-Json - } - else { - Write-Debug "Deployment Prinicpal Already Exists So Just Adding Contributor Role on Resource Group" - $SP = az role assignment create --assignee $spcheck[0].objectId --role "Contributor" --scope /subscriptions/$subid/resourceGroups/$env:AdsOpts_CD_ResourceGroup_Name | ConvertFrom-Json - } -} - - -$environmentfile = $env:AdsOpts_CD_FolderPaths_Environments + "/" + $env:ENVIRONMENT_NAME + ".json" -$envsettings = Get-Content $environmentfile | ConvertFrom-Json - -if($env:AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_Enable -eq "True") -{ - Write-Debug "Creating WebAppAuthentication Service Principal" - - $roleid = [guid]::NewGuid() - $roles = '[{\"allowedMemberTypes\": [\"Application\"],\"description\": \"Administrator\",\"displayName\": \"Administrator\",\"id\": \"@Id\",\"isEnabled\": true,\"lang\": null,\"origin\": \"Users\\Groups\",\"value\": \"Administrator\"}]' - $roles = $roles.Replace("@Id",$roleid) - - $replyurls = "https://$env:AdsOpts_CD_Services_WebSite_Name.azurewebsites.net/signin-oidc" - - $subid = ((az account show -s $env:AdsOpts_CD_ResourceGroup_Subscription) | ConvertFrom-Json ).id - $appid = ((az ad app create --display-name $env:AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_Name --homepage "api://$env:AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_Name" --identifier-uris "api://$env:AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_Name" --app-roles $roles --reply-urls $replyurls) | ConvertFrom-Json ).appId - $appid = ((az ad app show --id "api://$env:AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_Name") | ConvertFrom-Json ).appId - $spid = ((az ad sp create --id $appid) | ConvertFrom-Json ).ObjectId - -} - -if($env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_Enable -eq "True") -{ - 
Write-Debug "Creating FunctionAppAuthentication Service Principal" - - $roleid = [guid]::NewGuid() - $roles = '[{\"allowedMemberTypes\": [\"Application\"],\"description\": \"Used to applications to call the ADS Go Fast functions\",\"displayName\": \"FunctionAPICaller\",\"id\": \"@Id\",\"isEnabled\": true,\"lang\": null,\"origin\": \"Application\",\"value\": \"FunctionAPICaller\"}]' - $roles = $roles.Replace("@Id",$roleid) - - $subid = ((az account show -s $env:AdsOpts_CD_ResourceGroup_Subscription) | ConvertFrom-Json ).id - $appid = ((az ad app create --display-name $env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_Name --homepage "api://$env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_Name" --identifier-uris "api://$env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_Name" --app-roles $roles) | ConvertFrom-Json ).appId - $appid = ((az ad app show --id "api://$env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_Name") | ConvertFrom-Json ).appId - $spid = ((az ad sp create --id $appid) | ConvertFrom-Json ).ObjectId - #Will need to do below during service creation to add the Azure Function MSI to role - - #az role assignment create --assignee $appid --role $roleid --scope "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{providerName}/{resourceType}/{resourceSubType}/{resourceName}" - - #az rest --method patch --uri "https://graph.microsoft.com/beta/applications/" --headers '{"Content-Type":"application/json"}' --body '{"api":{"preAuthorizedApplications":[{"appId":"a37c1158-xxxxx94f2b","permissionIds":["5479xxxxx522869e718f0"]}]}}' - -} - - -#Update the Environment File -$appid = ((az ad app show --id "api://$env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_Name") | ConvertFrom-Json ).appId -$envsettings.AdsOpts.CD.ServicePrincipals.FunctionAppAuthenticationSP.ClientId = $appid -[Environment]::SetEnvironmentVariable("AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_ClientId", 
"$appid") -$appid = ((az ad app show --id "api://$env:AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_Name") | ConvertFrom-Json ).appId -$envsettings.AdsOpts.CD.ServicePrincipals.WebAppAuthenticationSP.ClientId = $appid -[Environment]::SetEnvironmentVariable("AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_ClientId", "$appid") -$envsettings | ConvertTo-Json -Depth 10 | set-content $environmentfile - - -#Check Status of Errors - -Write-Host "Script Complete Please Check below for Errors:" -Write-Host $error \ No newline at end of file diff --git a/solution/Deployment/workflows/CD_1a_DeployServices.ps1 b/solution/Deployment/workflows/CD_1a_DeployServices.ps1 deleted file mode 100644 index 7cbb5a54..00000000 --- a/solution/Deployment/workflows/CD_1a_DeployServices.ps1 +++ /dev/null @@ -1,56 +0,0 @@ -###################################################### -### Continuous Deployment #### -######################################################Write-Host ([Environment]::GetEnvironmentVariable("AdsOpts_CI_Enable")) -if (([Environment]::GetEnvironmentVariable("AdsOpts_CD_EnableDeploy")) -eq "True") -{ - $Scripts = @( - ".\Steps\CD_DeployKeyVault.ps1" - ,".\Steps\CD_DeployStorageForLogging.ps1" - ,".\Steps\CD_DeployStorageADLS.ps1" - ,".\Steps\CD_DeployStorageBlob.ps1" - ) - - Write-Debug "Starting CD.." 
- - $Scripts|ForEach-Object -Parallel { - Invoke-Expression -Command $_ - } - - $Scripts = @( - ,".\Steps\CD_DeployAppInsights.ps1" - ,".\Steps\CD_DeployLogAnalytics.ps1" - ,".\Steps\CD_DeployVnet.ps1" - ) - - $Scripts|ForEach-Object -Parallel { - Invoke-Expression -Command $_ - } - - $Scripts = @( - ,".\Steps\CD_DeployAppService.ps1" - ,".\Steps\CD_DeployAzureSqlServer.ps1" - ,".\Steps\CD_DeployADF.ps1" - ) - - $Scripts|ForEach-Object -Parallel { - Invoke-Expression -Command $_ - } - - $Scripts = @( - ,".\Steps\CD_DeployWebSite.ps1" - ,".\Steps\CD_DeployFunctionApp.ps1" - ) - - $Scripts|ForEach-Object -Parallel { - Invoke-Expression -Command $_ - } - - Write-Debug "Finishing CD.." -} -else -{ - - Write-Warning "CD_1a_DeployServices.ps1 skipped as flag in environment file is set to false" -} - - #,".\Cleanup_RemoveAll.ps1" \ No newline at end of file diff --git a/solution/Deployment/workflows/CD_2a_CreateMSIs_AAD_Elevated.ps1 b/solution/Deployment/workflows/CD_2a_CreateMSIs_AAD_Elevated.ps1 deleted file mode 100644 index 42ea81ce..00000000 --- a/solution/Deployment/workflows/CD_2a_CreateMSIs_AAD_Elevated.ps1 +++ /dev/null @@ -1,178 +0,0 @@ -az config set extension.use_dynamic_install=yes_without_prompt -#Create MSIs -if($env:AdsOpts_CD_Services_CoreFunctionApp_Enable -eq "True") -{ - $id = $null - $id = ((az functionapp identity show --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name) | ConvertFrom-Json).principalId - if ($null -eq $id) { - Write-Host "Creating MSI for FunctionApp" - $id = ((az functionapp identity assign --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name) | ConvertFrom-Json).principalId - } -} -else -{ - - Write-Host "AdsOpts_CD_Services_CoreFunctionApp skipped as flag in environment file is set to false" -ForegroundColor Yellow -} - -if($env:AdsOpts_CD_Services_WebSite_Enable -eq "True") -{ - $id = $null - $id = ((az webapp identity show 
--name $env:AdsOpts_CD_Services_WebSite_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name) | ConvertFrom-Json).principalId - if ($id -eq $null) { - Write-Host "Creating MSI for WebApp" - $id = ((az webapp identity assign --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_WebSite_Name) | ConvertFrom-Json).principalId - } -} -else -{ - - Write-Host "AdsOpts_CD_Services_WebSite_Enable skipped as flag in environment file is set to false" -ForegroundColor Yellow -} - - -#Make sure we have the datafactory extension -az extension add --name datafactory - -#Get ADF MSI Id -$dfpid = ((az datafactory show --factory-name $env:AdsOpts_CD_Services_DataFactory_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name) | ConvertFrom-Json).identity.principalId -$dfoid = ((az ad sp show --id $dfpid) | ConvertFrom-Json).objectId -#Allow ADF to Read Key Vault -$result = az keyvault set-policy --name $env:AdsOpts_CD_Services_KeyVault_Name --certificate-permissions get list --key-permissions get list --object-id $dfoid --resource-group $env:AdsOpts_CD_ResourceGroup_Name --secret-permissions get list --storage-permissions get --subscription $env:AdsOpts_CD_ResourceGroup_Subscription - - - - - -#Give MSIs Required AD Privileges -#Assign SQL Admin -$cu = az ad signed-in-user show | ConvertFrom-Json -$result = az sql server ad-admin create --display-name $cu.DisplayName --object-id $cu.ObjectId --resource-group $env:AdsOpts_CD_ResourceGroup_Name --server $env:AdsOpts_CD_Services_AzureSQLServer_Name --subscription $env:AdsOpts_CD_ResourceGroup_Subscription - -#az login --service-principal --username $env:secrets_AZURE_CREDENTIALS_clientId --password $env:secrets_AZURE_CREDENTIALS_clientSecret --tenant $env:secrets_AZURE_CREDENTIALS_tenantId - - -$SqlInstalled = Get-InstalledModule SqlServer -if($null -eq $SqlInstalled) -{ - write-host "Installing SqlServer Module" - Install-Module -Name SqlServer -Scope CurrentUser -Force -} - -#Add Ip to SQL Firewall 
-write-host "Creating SQL Server Firewall Rules" -$myIp = (Invoke-WebRequest ifconfig.me/ip).Content -$result = az sql server firewall-rule create -g $env:AdsOpts_CD_ResourceGroup_Name -s $env:AdsOpts_CD_Services_AzureSQLServer_Name -n "MySetupIP" --start-ip-address $myIp --end-ip-address $myIp - - -#May Need to add a wait here to allow MSI creation to have propogated completely - -#ADS GO FAST DB -#Deployment SP -$sqlcommand = " - DROP USER IF EXISTS [$env:AdsOpts_CD_ServicePrincipals_DeploymentSP_Name] - CREATE USER [$env:AdsOpts_CD_ServicePrincipals_DeploymentSP_Name] FROM EXTERNAL PROVIDER; - ALTER ROLE db_owner ADD MEMBER [$env:AdsOpts_CD_ServicePrincipals_DeploymentSP_Name]; - GO" - -$sqlcommand = " - DROP USER IF EXISTS [$env:AdsOpts_CD_Services_CoreFunctionApp_Name] - CREATE USER [$env:AdsOpts_CD_Services_CoreFunctionApp_Name] FROM EXTERNAL PROVIDER; - ALTER ROLE db_datareader ADD MEMBER [$env:AdsOpts_CD_Services_CoreFunctionApp_Name]; - ALTER ROLE db_datawriter ADD MEMBER [$env:AdsOpts_CD_Services_CoreFunctionApp_Name]; - ALTER ROLE db_ddladmin ADD MEMBER [$env:AdsOpts_CD_Services_CoreFunctionApp_Name]; - GRANT EXECUTE ON SCHEMA::[dbo] TO [$env:AdsOpts_CD_Services_CoreFunctionApp_Name]; - GO" - -$sqlcommand = $sqlcommand + " - DROP USER IF EXISTS [$env:AdsOpts_CD_Services_WebSite_Name] - CREATE USER [$env:AdsOpts_CD_Services_WebSite_Name] FROM EXTERNAL PROVIDER; - ALTER ROLE db_datareader ADD MEMBER [$env:AdsOpts_CD_Services_WebSite_Name]; - ALTER ROLE db_datawriter ADD MEMBER [$env:AdsOpts_CD_Services_WebSite_Name]; - GRANT EXECUTE ON SCHEMA::[dbo] TO [$env:AdsOpts_CD_Services_WebSite_Name]; - GO -" - -$sqlcommand = $sqlcommand + " - DROP USER IF EXISTS [$env:AdsOpts_CD_Services_DataFactory_Name] - CREATE USER [$env:AdsOpts_CD_Services_DataFactory_Name] FROM EXTERNAL PROVIDER; - ALTER ROLE db_datareader ADD MEMBER [$env:AdsOpts_CD_Services_DataFactory_Name]; - ALTER ROLE db_datawriter ADD MEMBER [$env:AdsOpts_CD_Services_DataFactory_Name]; - GRANT EXECUTE 
ON SCHEMA::[dbo] TO [$env:AdsOpts_CD_Services_DataFactory_Name]; - GO -" - -write-host "Granting MSI Privileges on ADS Go Fast DB" -$token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) -Invoke-Sqlcmd -ServerInstance "$env:AdsOpts_CD_Services_AzureSQLServer_Name.database.windows.net,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name -AccessToken $token -query $sqlcommand - -#SAMPLE DB -$sqlcommand = " - DROP USER IF EXISTS [$env:AdsOpts_CD_Services_DataFactory_Name] - CREATE USER [$env:AdsOpts_CD_Services_DataFactory_Name] FROM EXTERNAL PROVIDER; - ALTER ROLE db_datareader ADD MEMBER [$env:AdsOpts_CD_Services_DataFactory_Name]; - ALTER ROLE db_datawriter ADD MEMBER [$env:AdsOpts_CD_Services_DataFactory_Name]; - ALTER ROLE db_ddladmin ADD MEMBER [$env:AdsOpts_CD_Services_DataFactory_Name]; - GRANT EXECUTE ON SCHEMA::[dbo] TO [$env:AdsOpts_CD_Services_DataFactory_Name]; - GO -" - -write-host "Granting MSI Privileges on SAMPLE DB" -$token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) -Invoke-Sqlcmd -ServerInstance "$env:AdsOpts_CD_Services_AzureSQLServer_Name.database.windows.net,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_SampleDB_Name -AccessToken $token -query $sqlcommand - - -#STAGING DB -$sqlcommand = " - DROP USER IF EXISTS [$env:AdsOpts_CD_Services_DataFactory_Name] - CREATE USER [$env:AdsOpts_CD_Services_DataFactory_Name] FROM EXTERNAL PROVIDER; - ALTER ROLE db_datareader ADD MEMBER [$env:AdsOpts_CD_Services_DataFactory_Name]; - ALTER ROLE db_datawriter ADD MEMBER [$env:AdsOpts_CD_Services_DataFactory_Name]; - ALTER ROLE db_ddladmin ADD MEMBER [$env:AdsOpts_CD_Services_DataFactory_Name]; - GRANT EXECUTE ON SCHEMA::[dbo] TO [$env:AdsOpts_CD_Services_DataFactory_Name]; - GO -" - -write-host "Granting MSI Privileges on STAGING DB" -$token=$(az account get-access-token --resource=https://database.windows.net --query 
accessToken --output tsv) -Invoke-Sqlcmd -ServerInstance "$env:AdsOpts_CD_Services_AzureSQLServer_Name.database.windows.net,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_StagingDB_Name -AccessToken $token -query $sqlcommand - - -#Next Add MSIs Permissions -#Function App MSI Access to App Role to allow chained function calls -write-host "Granting Function App MSI Access to App Role to allow chained function calls" -$authapp = az ad app show --id "api://$env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_Name" | ConvertFrom-Json -$callingappid = ((az functionapp identity show --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name) | ConvertFrom-Json).principalId -$authappid = $authapp.appId -$permissionid = $authapp.oauth2Permissions.id - -$authappobjectid = (az ad sp show --id $authappid | ConvertFrom-Json).objectId - -$body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json -$body.resourceId = $authappobjectid -$body.appRoleId = ($authapp.appRoles | Where-Object {$_.value -eq "FunctionAPICaller" }).id -$body.principalId = $callingappid -$body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') - -$result = az rest --method post --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body - - -#Web App -write-host "Adding Admin Role To WebApp" -$authapp = az ad app show --id "api://$env:AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_Name" | ConvertFrom-Json -$callinguser = $cu.objectId -$authappid = $authapp.appId -$permissionid = $authapp.oauth2Permissions.id - -$authappobjectid = (az ad sp show --id $authapp.appId | ConvertFrom-Json).objectId - -$body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json -$body.resourceId = $authappobjectid -$body.appRoleId = 
($authapp.appRoles | Where-Object {$_.value -eq "Administrator" }).id -$body.principalId = $callinguser -$body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') - -$result = az rest --method post --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body - - -Invoke-Expression -Command ".\Steps\CD_GrantRBAC.ps1" diff --git a/solution/Deployment/workflows/CD_2b_ConfigureServices.ps1 b/solution/Deployment/workflows/CD_2b_ConfigureServices.ps1 deleted file mode 100644 index ea6a694a..00000000 --- a/solution/Deployment/workflows/CD_2b_ConfigureServices.ps1 +++ /dev/null @@ -1,35 +0,0 @@ - -#Invoke-Expression -Command ".\Steps\CD_SetResourceGroupHash.ps1" - -az config set extension.use_dynamic_install=yes_without_prompt - -#az login --service-principal --username $env:secrets_AZURE_CREDENTIALS_clientId --password $env:secrets_AZURE_CREDENTIALS_clientSecret --tenant $env:secrets_AZURE_CREDENTIALS_tenantId - - -###################################################### -### Continuous Deployment - Configure #### -######################################################Write-Host ([Environment]::GetEnvironmentVariable("AdsOpts_CI_Enable")) -if (([Environment]::GetEnvironmentVariable("AdsOpts_CD_EnableConfigure")) -eq "True") -{ - Write-Host "Starting CD.." - - Invoke-Expression -Command ".\Steps\CD_ConfigureKeyVault.ps1" - - Invoke-Expression -Command ".\Steps\CD_ConfigureAzureSQLServer.ps1" - - Invoke-Expression -Command ".\Steps\CD_ConfigureWebApp.ps1" - - Invoke-Expression -Command ".\Steps\CD_ConfigureFunctionApp.ps1" - - Invoke-Expression -Command ".\Steps\CD_ConfigureADF.ps1" - - Invoke-Expression -Command ".\Steps\CD_ConfigureVnet.ps1" - - Invoke-Expression -Command ".\Steps\CD_ConfigureAzureSqlServer_UpdateTaskTypeMappingJson.ps1" - - Invoke-Expression -Command ".\Steps\CD_ConfigureSampleData.ps1" - - Write-Host "Finishing CD.." 
-} - -#Invoke-Expression -Command ".\Cleanup_RemoveAll.ps1" \ No newline at end of file diff --git a/solution/Deployment/workflows/CI_1a_BuildCode.ps1 b/solution/Deployment/workflows/CI_1a_BuildCode.ps1 deleted file mode 100644 index 82dcd768..00000000 --- a/solution/Deployment/workflows/CI_1a_BuildCode.ps1 +++ /dev/null @@ -1,18 +0,0 @@ - -###################################################### -### Continuous Integration #### -###################################################### -if (([Environment]::GetEnvironmentVariable("AdsOpts_CI_Enable")) -eq "True") -{ - Write-Host "Starting CI.." - - Invoke-Expression -Command ".\Steps\CI_BuildFunctionApp.ps1" - - Invoke-Expression -Command ".\Steps\CI_BuildWebApp.ps1" - - Invoke-Expression -Command ".\Steps\CI_BuildAdsGoFastDatabase.ps1" - - Invoke-Expression -Command ".\Steps\CI_BuildDataFactory.ps1" - - Write-Host "Finishing CI.." -} \ No newline at end of file diff --git a/solution/Deployment/workflows/Cleanup_RemoveAll.ps1 b/solution/Deployment/workflows/Cleanup_RemoveAll.ps1 deleted file mode 100644 index aa56fd98..00000000 --- a/solution/Deployment/workflows/Cleanup_RemoveAll.ps1 +++ /dev/null @@ -1,17 +0,0 @@ -[Environment]::SetEnvironmentVariable("ENVIRONMENT_NAME", "development") -. 
.\Steps\PushEnvFileIntoVariables.ps1 -ParseEnvFile("$env:ENVIRONMENT_NAME") -Invoke-Expression -Command ".\Steps\CD_SetResourceGroupHash.ps1" - -az group delete --name $env:AdsOpts_CD_ResourceGroup_Name - -#Delete App and SP for Web App Auth -az ad app delete --id "api://$env:AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_Name" - -#Delete App and SP for Function App Auth -az ad app delete --id "api://$env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_Name" - -$resources = az resource list --resource-group $env:AdsOpts_CD_ResourceGroup_Name | ConvertFrom-Json -foreach ($resource in $resources) { - az resource delete --resource-group myResourceGroup --ids $resource.id --verbose -} \ No newline at end of file diff --git a/solution/Deployment/workflows/Functions/Helpers.psm1 b/solution/Deployment/workflows/Functions/Helpers.psm1 deleted file mode 100644 index 796f1f31..00000000 --- a/solution/Deployment/workflows/Functions/Helpers.psm1 +++ /dev/null @@ -1,32 +0,0 @@ -Function ConvertFrom-AzureCli { - [CmdletBinding()] - param ( - [Parameter(ValueFromPipeline)] [string] $line - ) - begin { - # Collect all lines in the input - $lines = @() - } - - process { - # 'process' is run once for each line in the input pipeline. - $lines += $line - } - - end { - # Azure Cli errors and warnings change output colors permanently. - # Reset the shell colors after each operation to keep consistent. - [Console]::ResetColor() - - # If the 'az' process exited with a non-zero exit code we have an error. - # The 'az' error message is already printed to console, and is not a part of the input. - if ($LASTEXITCODE) { - Write-Error "az exited with exit code $LASTEXITCODE" -ErrorAction 'Stop' - } - - $inputJson = $([string]::Join("`n", $lines)); - # We expect a Json result from az cli if we have no error. The json result CAN be $null. 
- $result = ConvertFrom-Json $inputJson - return $result - } - } \ No newline at end of file diff --git a/solution/Deployment/workflows/LocalDevOnly_EnvironmentSetUp.ps1 b/solution/Deployment/workflows/LocalDevOnly_EnvironmentSetUp.ps1 deleted file mode 100644 index 658ffcfb..00000000 --- a/solution/Deployment/workflows/LocalDevOnly_EnvironmentSetUp.ps1 +++ /dev/null @@ -1,16 +0,0 @@ -#az login -#az account set -s "jorampon internal consumption" -#$DebugPreference = "Continue" -#$DebugPreference = "SilentlyContinue" - -[Environment]::SetEnvironmentVariable("ENVIRONMENT_NAME", "development") -if (Test-Path -PathType Container -Path "../bin/"){$newitem = New-Item -ItemType Directory -Force -Path "../bin"} -$newitem = New-Item -Path "../bin/" -Name "GitEnv.txt" -type "file" -value "" -force -. .\Steps\PushEnvFileIntoVariables.ps1 -ParseEnvFile("$env:ENVIRONMENT_NAME") -Invoke-Expression -Command ".\Steps\CD_SetResourceGroupHash.ps1" - - -#Load Secrets into Environment Variables -ParseSecretsFile ($SecretFile) - diff --git a/solution/Deployment/workflows/LocalDevOnly_InstallOnPremSHIR.ps1 b/solution/Deployment/workflows/LocalDevOnly_InstallOnPremSHIR.ps1 deleted file mode 100644 index 592d64c0..00000000 --- a/solution/Deployment/workflows/LocalDevOnly_InstallOnPremSHIR.ps1 +++ /dev/null @@ -1,8 +0,0 @@ -if (([Environment]::GetEnvironmentVariable("AdsOpts_CD_Services_DataFactory_OnPremVnetIr_Enable")) -eq "True") -{ - Write-Host "Starting On Prem SHIR Installation.." - - Invoke-Expression -Command ".\Steps\CD_DeployADFOnPremSHIR.ps1" - - Write-Host "Completed On Prem SHIR Installation." 
-} diff --git a/solution/Deployment/workflows/Steps/CD_ConfigureADF.ps1 b/solution/Deployment/workflows/Steps/CD_ConfigureADF.ps1 deleted file mode 100644 index 6838fded..00000000 --- a/solution/Deployment/workflows/Steps/CD_ConfigureADF.ps1 +++ /dev/null @@ -1,277 +0,0 @@ -# Create ADF Diagnostic Settings -$logsSetting = "[{'category':'ActivityRuns','enabled':true,'retentionPolicy':{'days': 30,'enabled': true}},{'category':'PipelineRuns','enabled':true,'retentionPolicy':{'days': 30,'enabled': true}},{'category':'TriggerRuns','enabled':true,'retentionPolicy':{'days': 30,'enabled': true}}]".Replace("'",'\"') -$metricsSetting = "[{'category':'AllMetrics','enabled':true,'retentionPolicy':{'days': 30,'enabled': true}}]".Replace("'",'\"') - -$result = az monitor diagnostic-settings create --name ADF-Diagnostics --export-to-resource-specific true --resource "$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name" --logs $logsSetting --metrics $metricsSetting --storage-account "$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_Logging_Name" --workspace "$env:AdsOpts_CD_ResourceGroup_Id/providers/microsoft.operationalinsights/workspaces/$env:AdsOpts_CD_Services_LogAnalytics_Name" - -#Create IRs - -#Create Managed Network -$subid = (az account show -s $env:AdsOpts_CD_ResourceGroup_Subscription | ConvertFrom-Json).id -$uri = "https://management.azure.com/subscriptions/$subid/resourceGroups/$env:AdsOpts_CD_ResourceGroup_Name/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/managedVirtualNetworks/default" + '?api-version=2018-06-01' - -$rest = az rest --method put --uri $uri --headers '{\"Content-Type\":\"application/json\"}' --body '{\"properties\": {}}' - -#VNET -$body = ' -{ - "properties": { - "type": "Managed", - "typeProperties": { - "computeProperties": { - "location": "AutoResolve", - "dataFlowProperties": { - 
"computeType": "General", - "coreCount": 8, - "timeToLive": 10, - "cleanup": true - } - } - }, - "managedVirtualNetwork": { - "type": "ManagedVirtualNetworkReference", - "referenceName": "default" - } - } -}' | ConvertFrom-Json -$body = ($body | ConvertTo-Json -compress -Depth 10 | Out-String).Replace('"','\"') -$uri = "https://management.azure.com/$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/integrationRuntimes/$env:AdsOpts_CD_Services_DataFactory_AzVnetIr_Name" + '?&api-version=2018-06-01' -Write-Host "Creating IR: $env:AdsOpts_CD_Services_DataFactory_AzVnetIr_Name" -$rest = az rest --method put --uri $uri --headers '{\"Content-Type\":\"application/json\"}' --body $body - -#On Prem - Note we are using a managed VNET IR to mimic on prem -# $body = ' -# { -# "properties": { -# "type": "Managed", -# "typeProperties": { -# "computeProperties": { -# "location": "AutoResolve", -# "dataFlowProperties": { -# "computeType": "General", -# "coreCount": 8, -# "timeToLive": 10, -# "cleanup": true -# } -# } -# }, -# "managedVirtualNetwork": { -# "type": "ManagedVirtualNetworkReference", -# "referenceName": "default" -# } -# } -# }' | ConvertFrom-Json - -if (([Environment]::GetEnvironmentVariable("AdsOpts_CD_Services_DataFactory_OnPremVnetIr_Enable")) -eq "True") -{ - $body = ' - { - "properties": { - "type": "SelfHosted" - } - }' | ConvertFrom-Json - - $body = ($body | ConvertTo-Json -compress -Depth 10 | Out-String).Replace('"','\"') - $uri = "https://management.azure.com/$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/integrationRuntimes/$env:AdsOpts_CD_Services_DataFactory_OnPremVnetIr_Name" + '?&api-version=2018-06-01' - Write-Host "Creating IR: $env:AdsOpts_CD_Services_DataFactory_OnPremVnetIr_Name" - $rest = az rest --method put --uri $uri --headers '{\"Content-Type\":\"application/json\"}' --body $body -} - -$IRA_PostFix = "_" + 
$env:AdsOpts_CD_Services_DataFactory_AzVnetIr_Name -$IRB_PostFix = "_" + $env:AdsOpts_CD_Services_DataFactory_OnPremVnetIr_Name - -$dfbase = "$env:AdsOpts_CD_FolderPaths_PublishUnZip/datafactory" - -#Data Factory - LinkedServices -Get-ChildItem "$dfbase/linkedService" -Filter *.json | -Foreach-Object { - $lsName = $_.BaseName - $fileName = $_.FullName - $jsonobject = $_ | Get-Content | ConvertFrom-Json - - #Swap out Key Vault Url for Function App Linked Service - if($lsName -eq "AdsGoFastKeyVault") - { - $jsonobject.properties.typeProperties.baseUrl = "https://$env:AdsOpts_CD_Services_KeyVault_Name.vault.azure.net/" - } - - #Swap out Function App Url - if($lsName -eq "AzureFunctionAdsGoFastDataLakeAccelFunApp") - { - $jsonobject.properties.typeProperties.functionAppUrl = "https://$env:AdsOpts_CD_Services_CoreFunctionApp_Name.azurewebsites.net" - } - - #ParseOut the Name Attribute - $name = $jsonobject.name - - #Persist File Back - $jsonobject | ConvertTo-Json -Depth 100 | set-content $_ - - #Make a copy of the file for upload - Copy-Item -Path $fileName -Destination "FileForUpload.json" - - write-host ("LinkedService:" + $lsName) -ForegroundColor Yellow -BackgroundColor DarkGreen - #Set-AzDataFactoryV2LinkedService -DataFactoryName $env:AdsOpts_CD_Services_DataFactory_Name -ResourceGroupName $env:AdsOpts_CD_ResourceGroup_Name -Name $lsName -DefinitionFile $fileName -force - $body = ($jsonobject | ConvertTo-Json -compress -Depth 10 | Out-String).Replace('"','\"') - $uri = "https://management.azure.com/$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/linkedservices/$name" - write-host $uri - $rest = az rest --method put --uri $uri --headers '{\"Content-Type\":\"application/json\"}' --body "@FileForUpload.json" --uri-parameters 'api-version=2018-06-01' - - -} - -#Data Factory - Dataset -Get-ChildItem "$dfbase/dataset" -Filter *.json | -Foreach-Object { - $lsName = $_.BaseName - $fileName = $_.FullName 
- - #ParseOut the Name Attribute - $jsonobject = $_ | Get-Content | ConvertFrom-Json - $name = $jsonobject.name - - #Persist File Back - $jsonobject | ConvertTo-Json -Depth 100 | set-content $_ - - #Make a copy of the file for upload - Copy-Item -Path $fileName -Destination "FileForUpload.json" - - write-host ("Dataset: " + $fileName) -ForegroundColor Yellow -BackgroundColor DarkGreen - #Set-AzDataFactoryV2Dataset -DataFactoryName $env:AdsOpts_CD_Services_DataFactory_Name -ResourceGroupName $env:AdsOpts_CD_ResourceGroup_Name -Name $lsName -DefinitionFile $fileName -Force - $body = ($jsonobject | ConvertTo-Json -compress -Depth 10 | Out-String).Replace('"','\"') - $uri = "https://management.azure.com/$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/datasets/$name" - $rest = az rest --method put --uri $uri --headers '{\"Content-Type\":\"application/json\"}' --body "@FileForUpload.json" --uri-parameters 'api-version=2018-06-01' - - -} - - -#Move to pipelines directory -$CurrentPath = (Get-Location).Path -Set-Location "..\bin\publish\unzipped\datafactory\pipeline" - - -#Data Factory - Pipelines -Write-Host "Starting Pipelines" -Write-Host "Uploading Level 0 Dependencies" -Get-ChildItem "./" -Recurse -Include "AZ_Function*.json", "AZ_SQL_Watermark_IR*.json", "SH_SQL_Watermark_IR*.json" | -Foreach-Object { - $lsName = $_.BaseName - $fileName = $_.FullName - - #ParseOut the Name Attribute - $jsonobject = $_ | Get-Content | ConvertFrom-Json - $name = $jsonobject.name - - #Persist File Back - $jsonobject | ConvertTo-Json -Depth 100 | set-content $_ - - #Make a copy of the file for upload - Copy-Item -Path $fileName -Destination "FileForUpload.json" - - - write-host $fileName -ForegroundColor Yellow -BackgroundColor DarkGreen - #Set-AzDataFactoryV2Pipeline -DataFactoryName $env:AdsOpts_CD_Services_DataFactory_Name -ResourceGroupName $env:AdsOpts_CD_ResourceGroup_Name -Name $lsName -DefinitionFile $fileName 
-force - #$body = ($jsonobject | ConvertTo-Json -compress -Depth 100 | Out-String).Replace('"','\"') - $uri = "https://management.azure.com/$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/pipelines/$name" - $rest = az rest --method put --uri $uri --headers '{\"Content-Type\":\"application/json\"}' --body "@FileForUpload.json" --uri-parameters 'api-version=2018-06-01' - -} - -Write-Host "Uploading Level 1 Dependencies" -Get-ChildItem "./" -Recurse -Include "AZ_SQL_Full_Load_IR*.json", "SH_SQL_Full_Load_IR*.json" | -Foreach-Object { - $lsName = $_.BaseName - $fileName = $_.FullName - - #ParseOut the Name Attribute - $jsonobject = $_ | Get-Content | ConvertFrom-Json - $name = $jsonobject.name - - #Persist File Back - $jsonobject | ConvertTo-Json -Depth 100 | set-content $_ - - #Make a copy of the file for upload - Copy-Item -Path $fileName -Destination "FileForUpload.json" - - write-host $fileName -ForegroundColor Yellow -BackgroundColor DarkGreen - #Set-AzDataFactoryV2Pipeline -DataFactoryName $env:AdsOpts_CD_Services_DataFactory_Name -ResourceGroupName $env:AdsOpts_CD_ResourceGroup_Name -Name $lsName -DefinitionFile $fileName -Force - $body = ($jsonobject | ConvertTo-Json -compress -Depth 100 | Out-String).Replace('"','\"') - $uri = "https://management.azure.com/$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/pipelines/$name" - $rest = az rest --method put --uri $uri --headers '{\"Content-Type\":\"application/json\"}' --body "@FileForUpload.json" --uri-parameters 'api-version=2018-06-01' - -} -Write-Host "Uploading Level 3 Dependencies - Chunks" -Get-ChildItem "./" -Filter *chunk*.json | -Foreach-Object { - $lsName = $_.BaseName - $fileName = $_.FullName - - #ParseOut the Name Attribute - $jsonobject = $_ | Get-Content | ConvertFrom-Json - $name = $jsonobject.name - - #Persist File Back - $jsonobject | ConvertTo-Json -Depth 100 | 
set-content $_ - - #Make a copy of the file for upload - Copy-Item -Path $fileName -Destination "FileForUpload.json" - - write-host $fileName -ForegroundColor Yellow -BackgroundColor DarkGreen - #Set-AzDataFactoryV2Pipeline -DataFactoryName $env:AdsOpts_CD_Services_DataFactory_Name -ResourceGroupName $env:AdsOpts_CD_ResourceGroup_Name -Name $lsName -DefinitionFile $fileName -Force - $body = ($jsonobject | ConvertTo-Json -compress -Depth 100 | Out-String).Replace('"','\"') - $uri = "https://management.azure.com/$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/pipelines/$name" - $rest = az rest --method put --uri $uri --headers '{\"Content-Type\":\"application/json\"}' --body "@FileForUpload.json" --uri-parameters 'api-version=2018-06-01' - -} - -Write-Host "Uploading Level 4 Dependencies" -Get-ChildItem "./" -Exclude "FileForUpload.json", "Master*.json","AZ_Function_Generic.json", "OnP_SQL_Watermark_IR*.json", "AZ_SQL_Watermark_IR*.json", "*Chunk*.json", "AZ_SQL_Full_Load_IR*.json", "SH_SQL_Full_Load_IR*.json", "OnP_SQL_Full_Load_IR*.json", "SH_SQL_Watermark_IR*.json" | -Foreach-Object { - $lsName = $_.BaseName - $fileName = $_.FullName - - #ParseOut the Name Attribute - $jsonobject = $_ | Get-Content | ConvertFrom-Json - $name = $jsonobject.name - - #Persist File Back - $jsonobject | ConvertTo-Json -Depth 100 | set-content $_ - - #Make a copy of the file for upload - Copy-Item -Path $fileName -Destination "FileForUpload.json" - - write-host $fileName -ForegroundColor Yellow -BackgroundColor DarkGreen - #Set-AzDataFactoryV2Pipeline -DataFactoryName $env:AdsOpts_CD_Services_DataFactory_Name -ResourceGroupName $env:AdsOpts_CD_ResourceGroup_Name -Name $lsName -DefinitionFile $fileName -Force - $body = ($jsonobject | ConvertTo-Json -compress -Depth 100 | Out-String).Replace('"','\"') - $uri = 
"https://management.azure.com/$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/pipelines/$name" - $rest = az rest --method put --uri $uri --headers '{\"Content-Type\":\"application/json\"}' --body "@FileForUpload.json" --uri-parameters 'api-version=2018-06-01' -} - -Write-Host "Processing Master" -Get-ChildItem "./" -Filter Master*.json | -Foreach-Object { - - $lsName = $_.BaseName - $fileName = $_.FullName - - #ParseOut the Name Attribute - $jsonobject = $_ | Get-Content | ConvertFrom-Json - $name = $jsonobject.name - - #Make a copy of the file for upload - Copy-Item -Path $fileName -Destination "FileForUpload.json" - - write-host $fileName -ForegroundColor Yellow -BackgroundColor DarkGreen - #Set-AzDataFactoryV2Pipeline -DataFactoryName $env:AdsOpts_CD_Services_DataFactory_Name -ResourceGroupName $env:AdsOpts_CD_ResourceGroup_Name -Name $lsName -DefinitionFile $fileName -Force - $body = ($jsonobject | ConvertTo-Json -compress -Depth 100 | Out-String).Replace('"','\"') - $uri = "https://management.azure.com/$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/pipelines/$name" - Write-Host "Uploading Master" - $rest = az rest --method put --uri $uri --headers '{\"Content-Type\":\"application/json\"}' --body "@FileForUpload.json" --uri-parameters 'api-version=2018-06-01' - -} - -Remove-Item -Path "FileForUpload.json" -ErrorAction SilentlyContinue - -#Change Back to Workflows dir -Set-Location $CurrentPath \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CD_ConfigureAzureSQLServer.ps1 b/solution/Deployment/workflows/Steps/CD_ConfigureAzureSQLServer.ps1 deleted file mode 100644 index c6fc2556..00000000 --- a/solution/Deployment/workflows/Steps/CD_ConfigureAzureSQLServer.ps1 +++ /dev/null @@ -1,192 +0,0 @@ -#Function for password generator -$symbols = '!@#$%^&*'.ToCharArray() -$characterList = 'a'..'z' + 
'A'..'Z' + '0'..'9' + $symbols -function GeneratePassword { - param( - [ValidateRange(12, 256)] - [int] - $length = 14 - ) - - do { - $password = -join (0..$length | % { $characterList | Get-Random }) - [int]$hasLowerChar = $password -cmatch '[a-z]' - [int]$hasUpperChar = $password -cmatch '[A-Z]' - [int]$hasDigit = $password -match '[0-9]' - [int]$hasSymbol = $password.IndexOfAny($symbols) -ne -1 - - } - until (($hasLowerChar + $hasUpperChar + $hasDigit + $hasSymbol) -ge 3) - - $password | ConvertTo-SecureString -AsPlainText -} - - -Write-Debug "Configuring Azure SQL Server" - -#Install Sql Server Module -if (Get-Module -ListAvailable -Name SqlServer) { - Write-Host "SqlServer Module exists" -} -else { - Write-Host "Module does not exist.. installing.." - Install-Module -Name SqlServer -Force -} - -#Get Access Token for SQL --Note that the deployment principal or user running locally will need rights on the database -$token=$(az account get-access-token --resource=https://database.windows.net/ --query accessToken --output tsv) - - -$targetserver = $env:AdsOpts_CD_Services_AzureSQLServer_Name + ".database.windows.net" -if($env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Enable -eq "True") -{ - - #Add Ip to SQL Firewall - $myIp = (Invoke-WebRequest ifconfig.me/ip).Content - $result = az sql server firewall-rule create -g $env:AdsOpts_CD_ResourceGroup_Name -s $env:AdsOpts_CD_Services_AzureSQLServer_Name -n "MySetupIP" --start-ip-address $myIp --end-ip-address $myIp - #Allow Azure services and resources to access this server - $result = az sql server firewall-rule create -g $env:AdsOpts_CD_ResourceGroup_Name -s $env:AdsOpts_CD_Services_AzureSQLServer_Name -n "Azure" --start-ip-address 0.0.0.0 --end-ip-address 0.0.0.0 - - $CurrentPath = (Get-Location).Path - Set-Location "..\bin\publish\unzipped\database\" - - #Create User for DBUp - $temppassword = GeneratePassword - $sql = " - DROP USER IF EXISTS DbUpUser - CREATE USER DbUpUser WITH PASSWORD=N'$temppassword', 
DEFAULT_SCHEMA=[dbo] - EXEC sp_addrolemember 'db_owner', 'DbUpUser' - GO " - - Invoke-Sqlcmd -ServerInstance "$targetserver,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name -AccessToken "$token" -Query $sql - - #.\AdsGoFastDbUp.exe -c "Server=$targetserver; Database=$env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name; user id=$env:AdsOpts_CD_Services_AzureSQLServer_AdminUser; password=$env:AdsOpts_CD_Services_AzureSQLServer_AdminPassword" -v True - dotnet AdsGoFastDbUp.dll -c "Server=$targetserver; Database=$env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name; user id=DbUpUser; password='$temppassword'" -v True - Set-Location $CurrentPath - - - #Environment Specific Updates - $subid = (az account show -s $env:AdsOpts_CD_ResourceGroup_Subscription | ConvertFrom-Json).id - $LogAnalyticsId = az monitor log-analytics workspace show --resource-group $env:AdsOpts_CD_ResourceGroup_Name --workspace-name $env:AdsOpts_CD_Services_LogAnalytics_Name --query customerId --out tsv - - if($env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_UpdateDataFactory -eq "True") - { - $sql = - "Update [dbo].[DataFactory] - Set [Name] = '$env:AdsOpts_CD_Services_DataFactory_Name', - ResourceGroup = '$env:AdsOpts_CD_ResourceGroup_Name', - SubscriptionUid = '$subid', - DefaultKeyVaultURL = 'https://$env:AdsOpts_CD_Services_KeyVault_Name.vault.azure.net/', - LogAnalyticsWorkspaceId = '$LogAnalyticsId' - where id = 1" - - Write-Debug "Updating DataFactory in ADS Go Fast DB Config - DataFactory" - Invoke-Sqlcmd -ServerInstance "$targetserver,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name -AccessToken "$token" -Query $sql - } - if($env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_UpdateSourceAndTargetSystems -eq "True") - { - $sql = - " - Update - [dbo].[SourceAndTargetSystems] - Set - SystemServer = '$env:AdsOpts_CD_Services_AzureSQLServer_Name.database.windows.net', - SystemKeyVaultBaseUrl = 
'https://$env:AdsOpts_CD_Services_KeyVault_Name.vault.azure.net/', - SystemJSON = '{ ""Database"" : ""$env:AdsOpts_CD_Services_AzureSQLServer_SampleDB_Name"" }' - Where - SystemId = '1' - GO - - Update - [dbo].[SourceAndTargetSystems] - Set - SystemServer = '$env:AdsOpts_CD_Services_AzureSQLServer_Name.database.windows.net', - SystemKeyVaultBaseUrl = 'https://$env:AdsOpts_CD_Services_KeyVault_Name.vault.azure.net/', - SystemJSON = '{ ""Database"" : ""$env:AdsOpts_CD_Services_AzureSQLServer_StagingDB_Name"" }' - Where - SystemId = '2' - GO - - Update - [dbo].[SourceAndTargetSystems] - Set - SystemServer = '$env:AdsOpts_CD_Services_AzureSQLServer_Name.database.windows.net', - SystemKeyVaultBaseUrl = 'https://$env:AdsOpts_CD_Services_KeyVault_Name.vault.azure.net/', - SystemJSON = '{ ""Database"" : ""$env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name"" }' - Where - SystemId = '11' - GO - " - - Write-Debug "Updating DataFactory in ADS Go Fast DB Config - SourceAndTargetSystems - Azure SQL Servers" - Invoke-Sqlcmd -ServerInstance "$targetserver,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name -AccessToken "$token" -Query $sql - - $sql = - " - Update - [dbo].[SourceAndTargetSystems] - Set - SystemServer = 'https://$env:AdsOpts_CD_Services_Storage_Blob_Name.blob.core.windows.net', - SystemKeyVaultBaseUrl = 'https://$env:AdsOpts_CD_Services_KeyVault_Name.vault.azure.net/', - SystemJSON = '{ ""Container"" : ""datalakeraw"" }' - Where - SystemId = '3' - GO - - Update - [dbo].[SourceAndTargetSystems] - Set - SystemServer = 'https://$env:AdsOpts_CD_Services_Storage_Blob_Name.blob.core.windows.net', - SystemKeyVaultBaseUrl = 'https://$env:AdsOpts_CD_Services_KeyVault_Name.vault.azure.net/', - SystemJSON = '{ ""Container"" : ""datalakelanding"" }' - Where - SystemId = '7' - GO - - Update - [dbo].[SourceAndTargetSystems] - Set - SystemServer = 'https://$env:AdsOpts_CD_Services_Storage_Blob_Name.blob.core.windows.net', - SystemKeyVaultBaseUrl = 
'https://$env:AdsOpts_CD_Services_KeyVault_Name.vault.azure.net/', - SystemJSON = '{ ""Container"" : ""transientin"" }' - Where - SystemId = '9' - GO - - Update - [dbo].[SourceAndTargetSystems] - Set - SystemServer = 'https://$env:AdsOpts_CD_Services_Storage_ADLS_Name.dfs.core.windows.net', - SystemKeyVaultBaseUrl = 'https://$env:AdsOpts_CD_Services_KeyVault_Name.vault.azure.net/', - SystemJSON = '{ ""Container"" : ""datalakeraw"" }' - Where - SystemId = '4' - GO - - Update - [dbo].[SourceAndTargetSystems] - Set - SystemServer = 'https://$env:AdsOpts_CD_Services_Storage_ADLS_Name.dfs.core.windows.net', - SystemKeyVaultBaseUrl = 'https://$env:AdsOpts_CD_Services_KeyVault_Name.vault.azure.net/', - SystemJSON = '{ ""Container"" : ""datalakelanding"" }' - Where - SystemId = '8' - GO - " - - Write-Debug "Updating DataFactory in ADS Go Fast DB Config - SourceAndTargetSystems - Storage Accounts" - Invoke-Sqlcmd -ServerInstance "$targetserver,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name -AccessToken "$token" -Query $sql - - } - - #sqlpackage.exe /a:Publish /sf:'./../bin/publish/unzipped/database/AdsGoFastBuild.dacpac' /tsn:$targetserver /tdn:$env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name /tu:$env:AdsOpts_CD_Services_AzureSQLServer_AdminUser /tp:$env:AdsOpts_CD_Services_AzureSQLServer_AdminPassword - - - #Database - Post Script Deployment - #Invoke-Sqlcmd -ServerInstance "$targetserver,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name -Username $env:AdsOpts_CD_Services_AzureSQLServer_AdminUser -Password $env:AdsOpts_CD_Services_AzureSQLServer_AdminPassword -InputFile "./../../Database/AdsGoFastDatabase" - -} -else -{ - Write-Warning "Skipped Configuring Azure SQL Server" -} \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CD_ConfigureAzureSqlServer_UpdateTaskTypeMappingJson.ps1 b/solution/Deployment/workflows/Steps/CD_ConfigureAzureSqlServer_UpdateTaskTypeMappingJson.ps1 deleted file 
mode 100644 index e10e0636..00000000 --- a/solution/Deployment/workflows/Steps/CD_ConfigureAzureSqlServer_UpdateTaskTypeMappingJson.ps1 +++ /dev/null @@ -1,31 +0,0 @@ - -#Get Access Token for SQL --Note that the deployment principal or user running locally will need rights on the database -$token=$(az account get-access-token --resource=https://database.windows.net/ --query accessToken --output tsv) -$targetserver = $env:AdsOpts_CD_Services_AzureSQLServer_Name + ".database.windows.net" - -$jsonbase = "./../../TaskTypeJson/" -Get-ChildItem "$jsonbase" -Filter *.json | -Foreach-Object { - $lsName = $_.BaseName - $fileName = $_.FullName - $jsonobject = ($_ | Get-Content).Replace("'", "''") - $Name = $_.BaseName - $sql = "Update TaskTypeMapping - Set TaskMasterJsonSchema = new.TaskMasterJsonSchema - from TaskTypeMapping ttm - inner join - ( - Select MappingName = N'$Name' , TaskMasterJsonSchema = N'$jsonobject' - ) new on ttm.MappingName = new.MappingName" - Invoke-Sqlcmd -ServerInstance "$targetserver,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name -AccessToken "$token" -Query $sql -} - - -#Loop through all Existing Task Master JSON Entries in DB -#$tm = Invoke-Sqlcmd -ServerInstance "$targetserver,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name -AccessToken "$token" -Query "Select * from dbo.TaskMaster" - -#$ttm = Invoke-Sqlcmd -ServerInstance "$targetserver,1433" -Database $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name -AccessToken "$token" -Query "Select * from dbo.TaskTypeMapping" - - - - diff --git a/solution/Deployment/workflows/Steps/CD_ConfigureFunctionApp.ps1 b/solution/Deployment/workflows/Steps/CD_ConfigureFunctionApp.ps1 deleted file mode 100644 index 2c02648b..00000000 --- a/solution/Deployment/workflows/Steps/CD_ConfigureFunctionApp.ps1 +++ /dev/null @@ -1,50 +0,0 @@ -Write-Debug "Configuring Function App" - -$SourceFile = $env:AdsOpts_CD_FolderPaths_PublishZip + "/functionapp/Publish.zip" 
-if($env:AdsOpts_CD_Services_CoreFunctionApp_Enable -eq "True") -{ - - #Update App Settings - $appsettingsfile = $env:AdsOpts_CD_FolderPaths_PublishUnZip + "/functionapp/appsettings.json" - $appSettings = Get-Content $appsettingsfile | ConvertFrom-Json - $appSettings.ApplicationOptions.UseMSI = $true - $appSettings.ApplicationOptions.ServiceConnections.AdsGoFastTaskMetaDataDatabaseServer = "$env:AdsOpts_CD_Services_AzureSQLServer_Name.database.windows.net" - $appSettings.ApplicationOptions.ServiceConnections.AdsGoFastTaskMetaDataDatabaseName = $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name - $appSettings.ApplicationOptions.ServiceConnections.CoreFunctionsURL = "https://$env:AdsOpts_CD_Services_CoreFunctionApp_Name.azurewebsites.net" - - #Get App Insights WorkspaceID - $AppInsightsWPId = (az monitor app-insights component show --app $env:AdsOpts_CD_Services_AppInsights_Name -g $env:AdsOpts_CD_ResourceGroup_Name | ConvertFrom-Json).appId - $appSettings.ApplicationOptions.ServiceConnections.AppInsightsWorkspaceId = $AppInsightsWPId - - $appSettings.AzureAdAzureServicesViaAppReg.Domain=$env:AdsOpts_CD_ResourceGroup_Domain - $appSettings.AzureAdAzureServicesViaAppReg.TenantId=$env:AdsOpts_CD_ResourceGroup_TenantId - - #Client Secret is null as this is only used to validate the claims & to authenticate get a scope specific token based on MSI - $appSettings.AzureAdAzureServicesViaAppReg.Audience = "api://$env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_Name" - $appSettings.AzureAdAzureServicesViaAppReg.ClientSecret = $null - $appSettings.AzureAdAzureServicesViaAppReg.ClientId=$env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_ClientId - - #Setting to null as we are using MSI - $appSettings.AzureAdAzureServicesDirect.ClientSecret = $null - $appSettings.AzureAdAzureServicesDirect.ClientId=$null - - $appSettings | ConvertTo-Json -Depth 10 | set-content $appsettingsfile - - #Repack CoreFunctionApp - $CurrentPath = Get-Location - Set-Location 
"./../bin/publish" - $Path = (Get-Location).Path + "/zipped/functionapp" - New-Item -ItemType Directory -Force -Path $Path - $Path = $Path + "/Publish.zip" - Compress-Archive -Path '.\unzipped\functionapp\*' -DestinationPath $Path -force - #Move back to workflows - Set-Location $CurrentPath - - # Deploy CoreFunctionApp App - $result = az functionapp deployment source config-zip --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name --src $SourceFile - -} -else -{ - Write-Warning "Skipped Configuring Function App" -} diff --git a/solution/Deployment/workflows/Steps/CD_ConfigureKeyVault.ps1 b/solution/Deployment/workflows/Steps/CD_ConfigureKeyVault.ps1 deleted file mode 100644 index 9bcbe3ec..00000000 --- a/solution/Deployment/workflows/Steps/CD_ConfigureKeyVault.ps1 +++ /dev/null @@ -1,28 +0,0 @@ -Write-Debug " Configuring Key Vault" - -if($env:AdsOpts_CD_Services_KeyVault_Enable -eq "True") -{ - #Try to set user object id based on an interactive login - $AADUserId = (az ad signed-in-user show | ConvertFrom-Json).objectId - if ([string]::IsNullOrEmpty($AADUserId)) - { - #If Interactive user failed then use SP method. 
- $AADUserId = (az ad sp show --id (az account show | ConvertFrom-Json).user.name | ConvertFrom-Json).objectId - } - - - $functionkey = (az functionapp keys list -g $env:AdsOpts_CD_ResourceGroup_Name -n $env:AdsOpts_CD_Services_CoreFunctionApp_Name | ConvertFrom-Json).functionKeys.default - Write-Debug " Enabling Access to KeyVault and Adding Secrets" - #Set KeyVault Policy to allow logged in user to add key - $result = az keyvault set-policy --name $env:AdsOpts_CD_Services_KeyVault_Name --certificate-permissions backup create delete deleteissuers get getissuers import list listissuers managecontacts manageissuers purge recover restore setissuers update --key-permissions backup create decrypt delete encrypt get import list purge recover restore sign unwrapKey update verify wrapKey --object-id $AADUserId --resource-group $env:AdsOpts_CD_ResourceGroup_Name --secret-permissions backup delete get list purge recover restore set --storage-permissions backup delete deletesas get getsas list listsas purge recover regeneratekey restore set setsas update --subscription $env:AdsOpts_CD_ResourceGroup_Subscription - #Set KeyVault Policy to allow MSI for ADF to Retrieve Key Vault Key - #az keyvault set-policy --name $env:AdsOpts_CD_Services_KeyVault_Name --certificate-permissions backup create delete deleteissuers get getissuers import list listissuers managecontacts manageissuers purge recover restore setissuers update --key-permissions backup create decrypt delete encrypt get import list purge recover restore sign unwrapKey update verify wrapKey --object-id $AADUserId --resource-group $env:AdsOpts_CD_ResourceGroup_Name --secret-permissions backup delete get list purge recover restore set --storage-permissions backup delete deletesas get getsas list listsas purge recover regeneratekey restore set setsas update --subscription $env:AdsOpts_CD_ResourceGroup_Subscription - - #Save Function Key to KeyVault - $result = az keyvault secret set --name "AdsGfCoreFunctionAppKey" 
--vault-name $env:AdsOpts_CD_Services_KeyVault_Name --disabled false --subscription $env:AdsOpts_CD_ResourceGroup_Subscription --value $functionkey --output none - -} -else -{ - Write-Warning "Skipped Configuring Key Vault" -} diff --git a/solution/Deployment/workflows/Steps/CD_ConfigureSampleData.ps1 b/solution/Deployment/workflows/Steps/CD_ConfigureSampleData.ps1 deleted file mode 100644 index 23ce8e88..00000000 --- a/solution/Deployment/workflows/Steps/CD_ConfigureSampleData.ps1 +++ /dev/null @@ -1,10 +0,0 @@ - -$pathbase = "./../../SampleFiles/" -$files = @("yellow_tripdata_2017-03.xlsx","yellow_tripdata_2017-03.csv") - -$files | ForEach-Object -Parallel { - - $result = az storage blob upload --file $using:pathbase/$_ --container-name "datalakeraw" --name samples/$_ --account-name $env:AdsOpts_CD_Services_Storage_ADLS_Name - $result = az storage blob upload --file $using:pathbase/$_ --container-name "datalakeraw" --name samples/$_ --account-name $env:AdsOpts_CD_Services_Storage_Blob_Name -} - diff --git a/solution/Deployment/workflows/Steps/CD_ConfigureVnet.ps1 b/solution/Deployment/workflows/Steps/CD_ConfigureVnet.ps1 deleted file mode 100644 index 74f48029..00000000 --- a/solution/Deployment/workflows/Steps/CD_ConfigureVnet.ps1 +++ /dev/null @@ -1,203 +0,0 @@ -if($env:AdsOpts_CD_Services_Vnet_Enable -eq "True") -{ - ############################################################################################################## - #Firewall and Virtual Network Rules for Services - ############################################################################################################## - Write-Debug " Configuring VNet rules for provisioned services" - - #Enable Service Endpoints on the subnet. Required to be done before adding network rules. 
- $result = az network vnet subnet update --resource-group $env:AdsOpts_CD_ResourceGroup_Name --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --name $env:AdsOpts_CD_Services_Vnet_DataSubnetName --service-endpoints Microsoft.Sql Microsoft.Storage Microsoft.KeyVault Microsoft.Web - Write-Debug " Configured Microsoft.Storage Service Endpoint to subnet $env:AdsOpts_CD_Services_Vnet_DataSubnetName" - - #adls - $result = az storage account network-rule add --resource-group $env:AdsOpts_CD_ResourceGroup_Name --account-name $env:AdsOpts_CD_Services_Storage_ADLS_Name --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_DataSubnetName - $result = az storage account update --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_Storage_ADLS_Name --default-action Deny #Default action to apply when no rule matches i.e. allow access from selected network/PE only. - Write-Debug " Completed network rule configuration for storage $env:AdsOpts_CD_Services_Storage_ADLS_Name" - - #adlstran - #Note: Skipping as it will be outside for importing data into it. - - #Logging - #Note: Commented below to allow App Insights access to Storage account. - # az storage account network-rule add --resource-group $env:AdsOpts_CD_ResourceGroup_Name --account-name $env:AdsOpts_CD_Services_Storage_Logging_Name --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_DataSubnetName - # az storage account update --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_Storage_Logging_Name --default-action Deny #Default action to apply when no rule matches i.e. allow access from selected network/PE only. 
- # Write-Debug " Completed network rule configuration for storage $env:AdsOpts_CD_Services_Storage_Logging_Name" - - #Blob - $result = az storage account network-rule add --resource-group $env:AdsOpts_CD_ResourceGroup_Name --account-name $env:AdsOpts_CD_Services_Storage_Blob_Name --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_DataSubnetName - $result = az storage account update --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_Storage_Blob_Name --default-action Deny #Default action to apply when no rule matches i.e. allow access from selected network/PE only. - Write-Debug " Completed network rule configuration for storage $env:AdsOpts_CD_Services_Storage_Blob_Name" - - #Azure Sql - #Note: Commenting below as will use the PE for SQL, so vNet rule is not required. - # az sql server vnet-rule create --server $env:AdsOpts_CD_Services_AzureSQLServer_Name --name $env:AdsOpts_CD_Services_Vnet_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_DataSubnetName - # Write-Debug " Completed network rule configuration for Azure SQL Server $env:AdsOpts_CD_Services_AzureSQLServer_Name" - - #Key Vault - #Note: Commenting below as will use the PE for AKV, so vNet rule is not required. - # az keyvault network-rule add --name $env:AdsOpts_CD_Services_KeyVault_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_DataSubnetName - # az keyvault update --name $env:AdsOpts_CD_Services_KeyVault_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --default-action Deny #Default action to apply when no rule matches i.e. allow access from selected network/PE only. 
- # Write-Debug " Completed network rule configuration for Azure Key Vault $env:AdsOpts_CD_Services_KeyVault_Name" - - #Azure Function App - $result = az appservice plan update --name $env:AdsOpts_CD_Services_AppPlans_FunctionApp_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --sku P1V2 #Upgrade SKU to 'P1V2' to support PE and vNet Integration. - $result = az functionapp vnet-integration add --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name --vnet $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_FuncAppSubnetName - $result = az functionapp config access-restriction add --priority 100 --rule-name "Allow FuncApp Subnet" --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_FuncAppSubnetName --description "Allow vNet" --action Allow #Configure Access Restrictions. - $result = az functionapp config access-restriction add --priority 200 --rule-name "Allow Data Subnet" --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_DataSubnetName --description "Allow vNet" --action Allow #Configure Access Restrictions. - $result = az functionapp config access-restriction add --priority 300 --rule-name "Allow WebApp Subnet" --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_WebAppSubnetName --description "Allow vNet" --action Allow #Configure Access Restrictions. 
- $result = az functionapp config access-restriction add --priority 400 --rule-name "Allow Azure Portal" --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name --service-tag AzurePortal --description "Allow vNet" --action Allow #Configure Access Restrictions. - $result = az functionapp config access-restriction add --priority 500 --rule-name "Allow ADF" --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name --service-tag DataFactory --description "Allow vNet" --action Allow #Configure Access Restrictions. - Write-Debug " Completed network rule configuration for Azure Function App $env:AdsOpts_CD_Services_CoreFunctionApp_Name" - - #Azure Web App - $result = az appservice plan update --name $env:AdsOpts_CD_Services_AppPlans_WebApp_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --sku P1V2 #Upgrade SKU to 'P1V2' to support PE. - $result = az webapp vnet-integration add --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_Website_Name --vnet $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_WebAppSubnetName - #Note: Commented below to keep Web App accessible over internet. For customers who have VPN/Express Route/use Bastion -> Uncomment below. - #az functionapp config access-restriction add --priority 100 --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_Website_Name --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_WebAppSubnetName --description "Allow vNet" --action Allow #Configure Access Restrictions. 
- Write-Debug " Completed network rule configuration for Azure Web App $env:AdsOpts_CD_Services_Website_Name" -} -else -{ - Write-Warning "Skipped Configuration of Vnet rules for provisioned services" -} - -if($env:AdsOpts_CD_Services_Vnet_Enable -eq "True") -{ - ############################################################################################################## - #Private Endpoints for Services (ADF Managed and vNet Managed) - ############################################################################################################## - Write-Debug " Configuring Private Endpoints for provisioned services" - - ########## ADF Managed PE - - $apiVersion = "2018-06-01" - - #$adfId = az datafactory show --factory-name $env:AdsOpts_CD_Services_DataFactory_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --query '[id][0]' --output tsv - - #adls (ADF Managed PE) - $managedPrivateEndpointName = "ADF-Managed-PE-"+$env:AdsOpts_CD_Services_Storage_ADLS_Name - $privateEndpointResourceId = "$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/managedVirtualNetworks/default/managedprivateendpoints/${managedPrivateEndpointName}" - $privateLinkResourceId = az storage account show --name $env:AdsOpts_CD_Services_Storage_ADLS_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --query '[id][0]' --output tsv - Write-Debug " Creating $managedPrivateEndpointName" - New-AzResource -Force -ApiVersion "${apiVersion}" -ResourceId "${privateEndpointResourceId}" -Properties @{ - privateLinkResourceId = "${privateLinkResourceId}" - groupId = "dfs" - } - Write-Debug " Created $managedPrivateEndpointName" - - #Blob (ADF Managed PE) - $managedPrivateEndpointName = "ADF-Managed-PE-"+$env:AdsOpts_CD_Services_Storage_Blob_Name - $privateEndpointResourceId = 
"$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/managedVirtualNetworks/default/managedprivateendpoints/${managedPrivateEndpointName}" - $privateLinkResourceId = az storage account show --name $env:AdsOpts_CD_Services_Storage_Blob_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --query '[id][0]' --output tsv - Write-Debug " Creating $managedPrivateEndpointName" - New-AzResource -Force -ApiVersion "${apiVersion}" -ResourceId "${privateEndpointResourceId}" -Properties @{ - privateLinkResourceId = "${privateLinkResourceId}" - groupId = "blob" - } - Write-Debug " Created $managedPrivateEndpointName" - - #Note: Commented below to allow On-Prem SHIR to communicate with AKV (Non-VPN,Non-ExpressRoute, Non-Peering scenario) - # #Key Vault (ADF Managed PE) - # $managedPrivateEndpointName = "ADF-Managed-PE-"+$env:AdsOpts_CD_Services_KeyVault_Name - # $privateEndpointResourceId = "$env:AdsOpts_CD_ResourceGroup_Id/providers/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name/managedVirtualNetworks/default/managedprivateendpoints/${managedPrivateEndpointName}" - # $privateLinkResourceId = az keyvault show --name $env:AdsOpts_CD_Services_KeyVault_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --query '[id][0]' --output tsv - # Write-Debug " Creating $managedPrivateEndpointName" - # New-AzResource -Force -ApiVersion "${apiVersion}" -ResourceId "${privateEndpointResourceId}" -Properties @{ - # privateLinkResourceId = "${privateLinkResourceId}" - # groupId = "vault" - # } - # Write-Debug " Created $managedPrivateEndpointName" - - - ########## PEs - - #Note: Not using to allow SHIR to communicate with AKV (Non-VPN,Non-ExpressRoute, Peering scenario) - #Key Vault (vNet Managed PE) - Write-Debug " Creating PE for Azure Key Vault" - $PE_Name = "PE-AKV" - $Private_DNS_Zone_Name = "privatelink.vaultcore.azure.net" - $id= az keyvault list --resource-group 
$env:AdsOpts_CD_ResourceGroup_Name --query '[].[id]' --output tsv - - #PE - $result = az network private-endpoint create ` - --name $PE_Name ` - --resource-group $env:AdsOpts_CD_ResourceGroup_Name ` - --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_DataSubnetName ` - --private-connection-resource-id $id ` - --group-id "vault" ` - --connection-name "PE-AKV-Connection" - - #Private DNS Zone - $result = az network private-dns zone create ` - --resource-group $env:AdsOpts_CD_ResourceGroup_Name ` - --name $Private_DNS_Zone_Name - - $result = az network private-dns link vnet create ` - --resource-group $env:AdsOpts_CD_ResourceGroup_Name ` - --zone-name $Private_DNS_Zone_Name ` - --name "DnsLink-AzKeyVault" ` - --virtual-network $env:AdsOpts_CD_Services_Vnet_Name ` - --registration-enabled false - - $result = az network private-endpoint dns-zone-group create ` - --resource-group $env:AdsOpts_CD_ResourceGroup_Name ` - --endpoint-name $PE_Name ` - --name "AkvZoneGroup" ` - --private-dns-zone $Private_DNS_Zone_Name ` - --zone-name "akv" - - Write-Debug " Completed PE creation for Azure Key Vault" - - - #Azure Sql (vNet Managed) - Write-Debug " Creating PE for Azure SQL Server" - $PE_Name = "PE-SQL" - $Private_DNS_Zone_Name = "privatelink.database.windows.net" - $id= az sql server list --resource-group $env:AdsOpts_CD_ResourceGroup_Name --query '[].[id]' --output tsv - - #PE - $result = az network private-endpoint create ` - --name $PE_Name ` - --resource-group $env:AdsOpts_CD_ResourceGroup_Name ` - --vnet-name $env:AdsOpts_CD_Services_Vnet_Name --subnet $env:AdsOpts_CD_Services_Vnet_DataSubnetName ` - --private-connection-resource-id $id ` - --group-id "sqlServer" ` - --connection-name "PE-SQL-Connection" - - #Private DNS Zone - $result = az network private-dns zone create ` - --resource-group $env:AdsOpts_CD_ResourceGroup_Name ` - --name $Private_DNS_Zone_Name - - $result = az network private-dns link vnet create ` - --resource-group 
$env:AdsOpts_CD_ResourceGroup_Name ` - --zone-name $Private_DNS_Zone_Name ` - --name "DnsLink-AzSQL" ` - --virtual-network $env:AdsOpts_CD_Services_Vnet_Name ` - --registration-enabled false - - $result = az network private-endpoint dns-zone-group create ` - --resource-group $env:AdsOpts_CD_ResourceGroup_Name ` - --endpoint-name $PE_Name ` - --name "SqlZoneGroup" ` - --private-dns-zone $Private_DNS_Zone_Name ` - --zone-name "sql" - - Write-Debug " Completed PE creation for Azure SQL Server" - - ############################## - - #Post PE Addition Task - $result = az sql server update --name $env:AdsOpts_CD_Services_AzureSQLServer_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --enable-public-network false #Disable Public Access (Requires PE to be enabled first). - Write-Debug " Disabled Public Access to Azure SQL" - - #Enable soft-delete on AKV - $result = az keyvault update --name $env:AdsOpts_CD_Services_KeyVault_Name --enable-soft-delete true - Write-Debug " Soft-delete enabled on AKV" - - ############################## -} -else -{ - Write-Warning "Skipped Configuration of Private Endpoints for provisioned services" -} \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CD_ConfigureWebApp.ps1 b/solution/Deployment/workflows/Steps/CD_ConfigureWebApp.ps1 deleted file mode 100644 index 80a2725f..00000000 --- a/solution/Deployment/workflows/Steps/CD_ConfigureWebApp.ps1 +++ /dev/null @@ -1,47 +0,0 @@ -Write-Debug " Configuring Web App" - -$SourceFile = $env:AdsOpts_CD_FolderPaths_PublishZip + "/webapplication/Publish.zip" -if($env:AdsOpts_CD_Services_WebSite_Enable -eq "True") -{ - - #Update App Settings - - $appsettingsfile = $env:AdsOpts_CD_FolderPaths_PublishUnZip + "/webapplication/appsettings.json" - $appSettings = Get-Content $appsettingsfile | ConvertFrom-Json - - $appSettings.ApplicationOptions.UseMSI = $true - $appSettings.ApplicationOptions.AdsGoFastTaskMetaDataDatabaseServer = 
"$env:AdsOpts_CD_Services_AzureSQLServer_Name.database.windows.net" - $appSettings.ApplicationOptions.AdsGoFastTaskMetaDataDatabaseName = $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name - - $AppInsightsWPId = (az monitor app-insights component show --app $env:AdsOpts_CD_Services_AppInsights_Name -g $env:AdsOpts_CD_ResourceGroup_Name | ConvertFrom-Json).appId - $appSettings.ApplicationOptions.AppInsightsWorkspaceId = $AppInsightsWPId - - $LogAnalyticsWorkspaceId = (az monitor log-analytics workspace show --workspace-name $env:AdsOpts_CD_Services_LogAnalytics_Name -g $env:AdsOpts_CD_ResourceGroup_Name | ConvertFrom-Json -AsHashtable).customerId - $appSettings.ApplicationOptions.LogAnalyticsWorkspaceId = $LogAnalyticsWorkspaceId - - $appSettings.AzureAdAuth.Domain=$env:AdsOpts_CD_ResourceGroup_Domain - $appSettings.AzureAdAuth.TenantId=$env:AdsOpts_CD_ResourceGroup_TenantId - $appSettings.AzureAdAuth.ClientId= $env:AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_ClientId - $appSettings | ConvertTo-Json -Depth 10 | set-content $appsettingsfile - - #Repack WebApp - $CurrentPath = Get-Location - Set-Location "./../bin/publish" - $Path = (Get-Location).Path + "/zipped/webapplication" - New-Item -ItemType Directory -Force -Path $Path - $Path = $Path + "/Publish.zip" - Compress-Archive -Path '.\unzipped\webapplication\*' -DestinationPath $Path -force - #Move back to workflows - Set-Location $CurrentPath - - # Deploy Web App - $result = az webapp deployment source config-zip --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_WebSite_Name --src $SourceFile - - #Enable App Insights - #az resource create --resource-group $env:AdsOpts_CD_ResourceGroup_Name --resource-type "Microsoft.Insights/components" --name $env:AdsOpts_CD_Services_WebSite_Name --location $env:AdsOpts_CD_ResourceGroup_Location --properties '{\"Application_Type\":\"web\"}' - -} -else -{ - Write-Warning "Skipped Configuring Web App" -} \ No newline at end of file 
diff --git a/solution/Deployment/workflows/Steps/CD_DeployADF.ps1 b/solution/Deployment/workflows/Steps/CD_DeployADF.ps1 deleted file mode 100644 index 1099ae63..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployADF.ps1 +++ /dev/null @@ -1,10 +0,0 @@ - -if ($env:AdsOpts_CD_Services_DataFactory_Enable -eq "True") -{ - Write-Debug " Creating Data Factory" - $result = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/DataFactory.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location adf-name=$env:AdsOpts_CD_Services_DataFactory_Name -} -else -{ - Write-Warning "Skipped Creation of Data Factory" -} diff --git a/solution/Deployment/workflows/Steps/CD_DeployADFOnPremSHIR.ps1 b/solution/Deployment/workflows/Steps/CD_DeployADFOnPremSHIR.ps1 deleted file mode 100644 index 89b68b70..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployADFOnPremSHIR.ps1 +++ /dev/null @@ -1,60 +0,0 @@ - -$ProgressPreference = 'SilentlyContinue' #Turn-off the progress bar and speed up the download via Invoke-WebRequest - -$ADFLocalDrive = $env:AdsOpts_CD_Services_DataFactory_OnPremVnetIr_IrInstallConfig_LocalDrive #"C:" #Drive where the below directory will be created. -$ADFLocalVMFolder = $env:AdsOpts_CD_Services_DataFactory_OnPremVnetIr_IrInstallConfig_LocalVMFolder #"ADFInstaller" #Directory in which the .msi files will be downloaded. - -$ADFIRDownloadURL = $env:AdsOpts_CD_Services_DataFactory_OnPremVnetIr_IrInstallConfig_IrDownloadURL #"https://download.microsoft.com/download/E/4/7/E4771905-1079-445B-8BF9-8A1A075D8A10/IntegrationRuntime_5.9.7900.1.msi" -$ADFIRLocalFileName = $ADFIRDownloadURL.Split("/")[$ADFIRDownloadURL.Split("/").Length-1] #Get the .msi filename. -$ADFIRInstallerLocalFileLocation = $ADFLocalDrive + '\' + $ADFLocalVMFolder + '\' + $ADFIRLocalFileName #Local Path of downloaded installer. 
- -$ADFJDKDownloadURL = $env:AdsOpts_CD_Services_DataFactory_OnPremVnetIr_IrInstallConfig_JDKDownloadURL #"https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.12%2B7/OpenJDK11U-jdk_x64_windows_hotspot_11.0.12_7.msi" -$ADFJDKLocalFileName = $ADFJDKDownloadURL.Split("/")[$ADFJDKDownloadURL.Split("/").Length-1] #Get the .msi filename. -$ADFJDKInstallerLocalFileLocation = $ADFLocalDrive + '\' + $ADFLocalVMFolder + '\' + $ADFJDKLocalFileName #Local Path of downloaded installer. - -Write-Debug " Creating directory to download the SHIR installable." -New-Item -Path $ADFLocalDrive -Name $ADFLocalVMFolder -ItemType Directory -Force #'-Force' Ok if directory already exists. - -Write-Debug " Downloading the SHIR installable at $ADFIRInstallerLocalFileLocation." -Invoke-WebRequest -Uri $ADFIRDownloadURL -OutFile $ADFIRInstallerLocalFileLocation #Download SHIR installable. - -Write-Debug " Downloading the OpenJDK for SHIR at $ADFJDKInstallerLocalFileLocation." -Invoke-WebRequest -Uri $ADFJDKDownloadURL -OutFile $ADFJDKInstallerLocalFileLocation #Download OpenJDK. - -Write-Debug " Installing OpenJDK." -#msiexec /i $ADFJDKInstallerLocalFileLocation ADDLOCAL=FeatureMain,FeatureEnvironment,FeatureJarFileRunWith,FeatureJavaHome /quiet - -#Ensure command prompt is run as administrator -$MSIInstallArguments = @( - "/i" - "$ADFJDKInstallerLocalFileLocation" - 'ADDLOCAL=FeatureMain,FeatureEnvironment,FeatureJarFileRunWith,FeatureJavaHome' - # 'INSTALLDIR="c:\Program Files\Eclipse Foundation\"' - 'INSTALLDIR="""$env:AdsOpts_CD_Services_DataFactory_OnPremVnetIr_IrInstallConfig_JDKInstallFolder"""' - "/qb!" - "/norestart" -) -Write-Debug $MSIInstallArguments -Start-Process "msiexec.exe" -ArgumentList $MSIInstallArguments -Wait -NoNewWindow - -Write-Debug " Installing SHIR." 
-# Data Factory - VM AZ IR - Install IR -# $irKey1 = az datafactory integration-runtime list-auth-key --factory-name $DataFactoryName --name "SelfHostedIntegrationRuntime-Azure-VNET" --resource-group $ResourceGroupName --query authKey1 --out tsv -# az vm run-command invoke --command-id RunPowerShellScript --name $VMAzIR -g $ResourceGroupName --scripts "$ADFIRInstallerLocalFileLocation -path $ADFIRLocalFileLocation -authKey '$irKey1'" -# - -$irKey1 = az datafactory integration-runtime list-auth-key --factory-name $env:AdsOpts_CD_Services_DataFactory_Name --name $env:AdsOpts_CD_Services_DataFactory_OnPremVnetIr_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name --query authKey1 --out tsv -Write-Debug " irKey1 retrieved." - -# #Ensure command prompt is run as administrator -# $MSIInstallArguments = @( -# "/i" -# "$ADFIRInstallerLocalFileLocation" -# "authKey='$irKey1'" -# "/qb!" -# "/norestart" -# ) -# Write-Debug $MSIInstallArguments -# Start-Process "msiexec.exe" -ArgumentList $MSIInstallArguments -Wait -NoNewWindow - -. 
.\Steps\InstallGatewayFunctions.ps1 -path "$ADFIRInstallerLocalFileLocation" -authKey "$irKey1" \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CD_DeployAppInsights.ps1 b/solution/Deployment/workflows/Steps/CD_DeployAppInsights.ps1 deleted file mode 100644 index 1403813f..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployAppInsights.ps1 +++ /dev/null @@ -1,12 +0,0 @@ - -if ($env:AdsOpts_CD_Services_AppInsights_Enable -eq "True") -{ - Write-Debug " Creating App Insights" - $storageaccountkey = (az storage account keys list -g $env:AdsOpts_CD_ResourceGroup_Name -n $env:AdsOpts_CD_Services_Storage_Logging_Name | ConvertFrom-Json)[0].value - - $result = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/ApplicationInsights.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location appinsights_name=$env:AdsOpts_CD_Services_AppInsights_Name -} -else -{ - Write-Warning "Skipped Creation of App Insights" -} diff --git a/solution/Deployment/workflows/Steps/CD_DeployAppService.ps1 b/solution/Deployment/workflows/Steps/CD_DeployAppService.ps1 deleted file mode 100644 index 94d8a1bf..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployAppService.ps1 +++ /dev/null @@ -1,27 +0,0 @@ - -if ($env:AdsOpts_CD_Services_AppPlans_WebApp_Enable -eq "True") -{ - Write-Debug " Creating App Service for Web App" - #App Service (Includes both functions and web) - $storageaccountkey = (az storage account keys list -g $env:AdsOpts_CD_ResourceGroup_Name -n $env:AdsOpts_CD_Services_Storage_Logging_Name | ConvertFrom-Json)[0].value - - $result = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/AppService_Web.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location asp_name=$env:AdsOpts_CD_Services_AppPlans_WebApp_Name -} -else -{ - Write-Warning "Skipped Creation of App Service for Web App" -} - - -if ($env:AdsOpts_CD_Services_AppPlans_FunctionApp_Enable 
-eq "True") -{ - Write-Debug " Creating App Service for Function App" - #App Service (Includes both functions and web) - $storageaccountkey = (az storage account keys list -g $env:AdsOpts_CD_ResourceGroup_Name -n $env:AdsOpts_CD_Services_Storage_Logging_Name | ConvertFrom-Json)[0].value - - $result = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/AppService_Func.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location asp_name=$env:AdsOpts_CD_Services_AppPlans_FunctionApp_Name -} -else -{ - Write-Warning "Skipped Creation of App Service For Func App" -} \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CD_DeployAzureSqlServer.ps1 b/solution/Deployment/workflows/Steps/CD_DeployAzureSqlServer.ps1 deleted file mode 100644 index d67c39b1..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployAzureSqlServer.ps1 +++ /dev/null @@ -1,45 +0,0 @@ -Write-Debug " Creating Azure SQL Server" -Write-Debug $env:AdsOpts_CD_Services_AzureSQLServer_Name -Write-Debug $env:AdsOpts_CD_Services_AzureSQLServer_SampleDB_Name -Write-Debug $env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name -Write-Debug $env:AdsOpts_CD_Services_AzureSQLServer_AdminUser -Write-Debug $env:AdsOpts_CD_Services_AzureSQLServer_StagingDB_Name - -$symbols = '!@#$%^&*'.ToCharArray() -$characterList = 'a'..'z' + 'A'..'Z' + '0'..'9' + $symbols -function GeneratePassword { - param( - [ValidateRange(12, 256)] - [int] - $length = 14 - ) - - do { - $password = -join (0..$length | % { $characterList | Get-Random }) - [int]$hasLowerChar = $password -cmatch '[a-z]' - [int]$hasUpperChar = $password -cmatch '[A-Z]' - [int]$hasDigit = $password -match '[0-9]' - [int]$hasSymbol = $password.IndexOfAny($symbols) -ne -1 - - } - until (($hasLowerChar + $hasUpperChar + $hasDigit + $hasSymbol) -ge 3) - - $password | ConvertTo-SecureString -AsPlainText -} - - -if($env:AdsOpts_CD_Services_AzureSQLServer_Enable -eq "True") -{ - Write-Debug " Creating 
Azure SQL Server" - $temppassword = GeneratePassword - [Environment]::SetEnvironmentVariable("AdsOpts_CD_Services_AzureSQLServer_AdminPassword", "$temppassword") - #StorageAccount For Logging - $result = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/AzureSQLServer.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location sql_server_name=$env:AdsOpts_CD_Services_AzureSQLServer_Name sql_admin_login=$env:AdsOpts_CD_Services_AzureSQLServer_AdminUser sql_admin_password=$env:AdsOpts_CD_Services_AzureSQLServer_AdminPassword sample_db_name=$env:AdsOpts_CD_Services_AzureSQLServer_SampleDB_Name ads_go_fast_db_name=$env:AdsOpts_CD_Services_AzureSQLServer_AdsGoFastDB_Name staging_db_name=$env:AdsOpts_CD_Services_AzureSQLServer_StagingDB_Name vnet_name=$env:AdsOpts_CD_Services_Vnet_Name - - #Make sure password is correct - $result = az sql server update -n $env:AdsOpts_CD_Services_AzureSQLServer_Name -g $env:AdsOpts_CD_ResourceGroup_Name -p ($env:AdsOpts_CD_Services_AzureSQLServer_AdminPassword | Out-String) -} -else -{ - Write-Warning "Skipped Creation of Azure SQL Server" -} \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CD_DeployFunctionApp.ps1 b/solution/Deployment/workflows/Steps/CD_DeployFunctionApp.ps1 deleted file mode 100644 index 1ff86481..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployFunctionApp.ps1 +++ /dev/null @@ -1,27 +0,0 @@ - -Write-Debug " Creating Function App" -if ($env:AdsOpts_CD_Services_CoreFunctionApp_Enable -eq "True") -{ - if ($env:AdsOpts_CD_Services_AppPlans_FunctionApp_ResourceGroup -eq $null) - { - $rg = $env:AdsOpts_CD_ResourceGroup_Name - } - else - { - $rg = $env:AdsOpts_CD_Services_AppPlans_FunctionApp_ResourceGroup - } - - $sn = $env:AdsOpts_CD_Services_AppPlans_FunctionApp_Name - - - #$appserviceid = ((az appservice plan show --name $sn --resource-group $rg) | ConvertFrom-Json).id - - $key = az storage account keys list -g 
$env:AdsOpts_CD_ResourceGroup_Name -n $env:AdsOpts_CD_Services_Storage_Logging_Name --query [0].value -o tsv - - $result = az deployment group create -g $rg --template-file ./../arm/FunctionApp.json --parameters azure-function-site-name=$env:AdsOpts_CD_Services_CoreFunctionApp_Name app-insights-name=$env:AdsOpts_CD_Services_AppInsights_Name storage-log-account-name=$env:AdsOpts_CD_Services_Storage_Logging_Name storage-log-account-key=$key appservice-name=$sn -} -else -{ - Write-Warning "Skipped Creation of Function App" -} - diff --git a/solution/Deployment/workflows/Steps/CD_DeployKeyVault.ps1 b/solution/Deployment/workflows/Steps/CD_DeployKeyVault.ps1 deleted file mode 100644 index db02cff0..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployKeyVault.ps1 +++ /dev/null @@ -1,10 +0,0 @@ - -if ($env:AdsOpts_CD_Services_KeyVault_Enable -eq "True") -{ - Write-Debug "Creating Key Vault" - $result = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/KeyVault.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location keyvault-name=$env:AdsOpts_CD_Services_KeyVault_Name tenant-id=$env:AdsOpts_CD_ResourceGroup_TenantId -} -else -{ - Write-Warning "Skipped Creation of Key Vault" -} diff --git a/solution/Deployment/workflows/Steps/CD_DeployLogAnalytics.ps1 b/solution/Deployment/workflows/Steps/CD_DeployLogAnalytics.ps1 deleted file mode 100644 index 25eef11d..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployLogAnalytics.ps1 +++ /dev/null @@ -1,10 +0,0 @@ - -if ($env:AdsOpts_CD_Services_DataFactory_Enable -eq "True") -{ - Write-Debug " Creating Log Analyticss" - $results = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/LogAnalytics.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location workspaces_adsgofastloganalytics_name=$env:AdsOpts_CD_Services_LogAnalytics_Name -} -else -{ - Write-Warning "Skipped Creation of Log Analytics" -} diff --git 
a/solution/Deployment/workflows/Steps/CD_DeployResourceGroup.ps1 b/solution/Deployment/workflows/Steps/CD_DeployResourceGroup.ps1 deleted file mode 100644 index 917ecace..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployResourceGroup.ps1 +++ /dev/null @@ -1,14 +0,0 @@ -if($env:AdsOpts_CD_ResourceGroup_Enable -eq "True") -{ - Write-Debug "Creating Resource Group" - $groupcreate = az group create --name $env:AdsOpts_CD_ResourceGroup_Name --location $env:AdsOpts_CD_ResourceGroup_Location -} - -#Get ResourceGroup Object ID -$id = ((az group show --name $env:AdsOpts_CD_ResourceGroup_Name) | ConvertFrom-Json).id -#Save to Environment File -$environmentfile = $env:AdsOpts_CD_FolderPaths_Environments + "/" + $env:ENVIRONMENT_NAME + ".json" -$envsettings = Get-Content $environmentfile | ConvertFrom-Json -$envsettings.AdsOpts.CD.ResourceGroup.Id = $id -$envsettings | ConvertTo-Json -Depth 10 | set-content $environmentfile - diff --git a/solution/Deployment/workflows/Steps/CD_DeployStorageADLS.ps1 b/solution/Deployment/workflows/Steps/CD_DeployStorageADLS.ps1 deleted file mode 100644 index a1d1dd46..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployStorageADLS.ps1 +++ /dev/null @@ -1,23 +0,0 @@ -Write-Debug " Creating Storage Account (ADLS) For Data Lake" -if($env:AdsOpts_CD_Services_Storage_ADLS_Enable -eq "True") -{ - #StorageAccount For Logging - $result = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/Storage_ADLS.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location storage-account-name=$env:AdsOpts_CD_Services_Storage_ADLS_Name - Write-Debug " Creating Storage Account (ADLS) For Data Lake" -} -else -{ - Write-Warning "Skipped Creation of Storage (ADLS) For Data Lake" -} - -#Transient Storage Account -if($env:AdsOpts_CD_Services_Storage_ADLSTransient_Enable -eq "True") -{ - Write-Debug " Creating Transient Storage Account (ADLS)" - - $result = az deployment group create -g 
$env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/Storage_ADLS.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location storage-account-name=$env:AdsOpts_CD_Services_Storage_ADLSTransient_Name storage-raw-container-name=transient -} -else -{ - Write-Warning "Skipped Creation of Transient Storage (ADLS)" -} \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CD_DeployStorageBlob.ps1 b/solution/Deployment/workflows/Steps/CD_DeployStorageBlob.ps1 deleted file mode 100644 index f125d152..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployStorageBlob.ps1 +++ /dev/null @@ -1,11 +0,0 @@ -Write-Debug " Creating Storage Account (Blob) For Data Lake" -if($env:AdsOpts_CD_Services_Storage_Blob_Enable -eq "True") -{ - #StorageAccount For Logging - $result = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/Storage_Blob.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location storage-account-name=$env:AdsOpts_CD_Services_Storage_Blob_Name - Write-Debug " Creating Storage Account (Blob) For Data Lake" -} -else -{ - Write-Warning "Skipped Creation of Storage (Blob) For Data Lake" -} \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CD_DeployStorageForLogging.ps1 b/solution/Deployment/workflows/Steps/CD_DeployStorageForLogging.ps1 deleted file mode 100644 index e724fc67..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployStorageForLogging.ps1 +++ /dev/null @@ -1,12 +0,0 @@ -Write-Debug " Creating Storage Account For Logging" -Write-Debug $env:AdsOpts_CD_Services_Storage_Logging_Name -if($env:AdsOpts_CD_Services_Storage_Logging_Enable -eq "True") -{ - #StorageAccount For Logging - $result = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/Storage_Logging.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location storage-log-account-name=$env:AdsOpts_CD_Services_Storage_Logging_Name - Write-Debug " 
Creating Storage Account For Logging" -} -else -{ - Write-Warning "Skipped Creation of Storage Account For Logging" -} \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CD_DeployVnet.ps1 b/solution/Deployment/workflows/Steps/CD_DeployVnet.ps1 deleted file mode 100644 index f6553d6e..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployVnet.ps1 +++ /dev/null @@ -1,25 +0,0 @@ -#SetServiceName -RootElement "AdsOpts_CD_Services_Vnet" - -if($env:AdsOpts_CD_Services_Vnet_Enable -eq "True") -{ - Write-Debug " Creating Vnet + Subnets (Bastion, Data, WebApp)" - Write-Debug $env:AdsOpts_CD_Services_Vnet_Name - - #vNet - $result = az deployment group create -g $env:AdsOpts_CD_ResourceGroup_Name --template-file ./../arm/Networking.json --parameters location=$env:AdsOpts_CD_ResourceGroup_Location vnet-name=$env:AdsOpts_CD_Services_Vnet_Name vnet-address-prefix=$env:AdsOpts_CD_Services_Vnet_vNetAddressRange ` - bastion-subnet-ip-prefix=$env:AdsOpts_CD_Services_Vnet_BastionSubnetAddressRange ` - data-subnet-ip-prefix=$env:AdsOpts_CD_Services_Vnet_DataSubnetAddressRange ` - webapp-subnet-ip-prefix=$env:AdsOpts_CD_Services_Vnet_WebAppSubnetAddressRange ` - funcapp-subnet-ip-prefix=$env:AdsOpts_CD_Services_Vnet_FuncAppSubnetAddressRange ` - bastion-host-name=$env:AdsOpts_CD_Services_Bastion_Name ` - bastion-subnet-name=$env:AdsOpts_CD_Services_Vnet_BastionSubnetName ` - data-subnet-name=$env:AdsOpts_CD_Services_Vnet_DataSubnetName ` - webapp-subnet-name=$env:AdsOpts_CD_Services_Vnet_WebAppSubnetName ` - funcapp-subnet-name=$env:AdsOpts_CD_Services_Vnet_FuncAppSubnetName - - Write-Debug " Creating Vnet" -} -else -{ - Write-Warning "Skipped Creation of Vnet" -} \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CD_DeployWebSite.ps1 b/solution/Deployment/workflows/Steps/CD_DeployWebSite.ps1 deleted file mode 100644 index 7f92a8c4..00000000 --- a/solution/Deployment/workflows/Steps/CD_DeployWebSite.ps1 +++ /dev/null @@ -1,22 
+0,0 @@ - -if ($env:AdsOpts_CD_Services_WebSite_Enable -eq "True") -{ - if ($env:AdsOpts_CD_Services_AppPlans_WebApp_ResourceGroup -eq $null) - { - $rg = $env:AdsOpts_CD_ResourceGroup_Name - } - else - { - $rg = $env:AdsOpts_CD_Services_AppPlans_WebApp_ResourceGroup - } - - - $sn = $env:AdsOpts_CD_Services_AppPlans_WebApp_Name - - Write-Debug " Deploying Wesite to $sn in resource group $rg" - $result = az deployment group create -g $rg --template-file ./../arm/WebApp.json --parameters resource_group_name=$rg sites_AdsGoFastWebApp_name=$env:AdsOpts_CD_Services_WebSite_Name appservice_name=$sn} -else -{ - Write-Warning "Skipped Creation of Web Site" -} - diff --git a/solution/Deployment/workflows/Steps/CD_GrantRBAC.ps1 b/solution/Deployment/workflows/Steps/CD_GrantRBAC.ps1 deleted file mode 100644 index c7c3553a..00000000 --- a/solution/Deployment/workflows/Steps/CD_GrantRBAC.ps1 +++ /dev/null @@ -1,58 +0,0 @@ - $subid = (az account show -s $env:AdsOpts_CD_ResourceGroup_Subscription | ConvertFrom-Json).id - $basescope = "/subscriptions/$subid/resourceGroups/$env:AdsOpts_CD_ResourceGroup_Name/providers" - $DataFactoryId = az ad sp list --display-name $env:AdsOpts_CD_Services_DataFactory_Name --output tsv --query "[].{id:objectId}" - $AzureFunctionId = ((az webapp identity show --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name) | ConvertFrom-Json).principalId - $DeploymentSpId = (az ad sp list --filter "displayname eq '$env:AdsOpts_CD_ServicePrincipals_DeploymentSP_Name'" | ConvertFrom-Json).appId - $WebAppID = ((az webapp identity show --resource-group $env:AdsOpts_CD_ResourceGroup_Name --name $env:AdsOpts_CD_Services_WebSite_Name) | ConvertFrom-Json).principalId - $AADUserId = (az ad signed-in-user show | ConvertFrom-Json).objectId - - #RBAC Rights - # MSI Access from Azure Function to ADF - $result = az role assignment create --assignee $AzureFunctionId --role "Contributor" --scope 
"$basescope/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name" - # Deployment Principal to ADF - $result = az role assignment create --assignee $DeploymentSpId --role "Contributor" --scope "$basescope/Microsoft.DataFactory/factories/$env:AdsOpts_CD_Services_DataFactory_Name" - - # MSI Access from Azure Function to ADF Log Analytics - $result = az role assignment create --assignee $AzureFunctionId --role "Contributor" --scope "$basescope/microsoft.operationalinsights/workspaces/$env:AdsOpts_CD_Services_LogAnalytics_Name" - # MSI Access from AF to ADLS Gen2 - $result = az role assignment create --assignee $AzureFunctionId --role "Storage Blob Data Contributor" --scope "$basescope/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_ADLS_Name" - # MSI Access from AF to Blob Gen2 - $result = az role assignment create --assignee $AzureFunctionId --role "Storage Blob Data Contributor" --scope "$basescope/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_Blob_Name" - - # MSI Access from ADF to ADLS Gen2 - $result = az role assignment create --assignee $DataFactoryId --role "Storage Blob Data Contributor" --scope "$basescope/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_ADLS_Name" - - # MSI Access from ADF to Blob - $result = az role assignment create --assignee $DataFactoryId --role "Storage Blob Data Contributor" --scope "$basescope/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_Blob_Name" - - # MSI Access from ADF to KeyVault - $result = az keyvault set-policy --name $env:AdsOpts_CD_Services_KeyVault_Name --object-id $DataFactoryId --certificate-permissions get list --key-permissions get list --resource-group $env:AdsOpts_CD_ResourceGroup_Name --secret-permissions get list --subscription $subid - # MSI Access from WebApp to ADLS Gen2 - $result = az role assignment create --assignee $WebAppID --role "Storage Blob Data Contributor" --scope 
"$basescope/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_ADLS_Name" - # MSI Access from WebApp to ADF Log Analytics - $result = az role assignment create --assignee $WebAppID --role "Contributor" --scope "$basescope/microsoft.operationalinsights/workspaces/$env:AdsOpts_CD_Services_LogAnalytics_Name" - # MSI Access from WebApp to ADF App Insights - $result = az role assignment create --assignee $WebAppID --role "Contributor" --scope "$basescope/microsoft.insights/components/$env:AdsOpts_CD_Services_AppInsights_Name" - # AAD User Access to ADLS Gen2 - to upload sample data files - $result = az role assignment create --assignee $AADUserId --role "Storage Blob Data Contributor" --scope "$basescope/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_ADLS_Name" - $result = az role assignment create --assignee $AADUserId --role "Owner" --scope "$basescope/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_ADLS_Name" - - -#Transient Storage Account -if($env:AdsOpts_CD_Services_Storage_ADLSTransient_Enable -eq "True") -{ - Write-Debug " Granting RBAC on Transient Storage Account (ADLS)" - - # MSI Access from AF to ADLS Gen2 - $result = az role assignment create --assignee $AzureFunctionId --role "Storage Blob Data Contributor" --scope "$basescope/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_ADLSTransient_Name" - - # MSI Access from ADF to ADLS Gen2 - $result = az role assignment create --assignee $DataFactoryId --role "Storage Blob Data Contributor" --scope "$basescope/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_ADLSTransient_Name" - - # MSI Access from WebApp to ADLS Gen2 - $result = az role assignment create --assignee $WebAppID --role "Storage Blob Data Contributor" --scope "$basescope/Microsoft.Storage/storageAccounts/$env:AdsOpts_CD_Services_Storage_ADLSTransient_Name" -} -else -{ - Write-Warning "Skipped RBAC on Transient Storage Account (ADLS)" -} \ No newline at end of 
file diff --git a/solution/Deployment/workflows/Steps/CD_GrantWebAppAccess.ps1 b/solution/Deployment/workflows/Steps/CD_GrantWebAppAccess.ps1 deleted file mode 100644 index b1ea53ae..00000000 --- a/solution/Deployment/workflows/Steps/CD_GrantWebAppAccess.ps1 +++ /dev/null @@ -1,21 +0,0 @@ -#Description - Run this script per user who require the Web App UI access. - -#Note: --id parameter is principal name of the user for which to get information. -$cu = az ad user show --id 'xxxxx@xxxxx.com' | ConvertFrom-Json - -#Script from CD_2a_ file -#Web App -$authapp = az ad app show --id "api://$env:AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP_Name" | ConvertFrom-Json -$callinguser = $cu.objectId -$authappid = $authapp.appId -$permissionid = $authapp.oauth2Permissions.id - -$authappobjectid = (az ad sp show --id $authapp.appId | ConvertFrom-Json).objectId - -$body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json -$body.resourceId = $authappobjectid -$body.appRoleId = ($authapp.appRoles | Where-Object {$_.value -eq "Administrator" }).id -$body.principalId = $callinguser -$body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') - -$result = az rest --method post --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body diff --git a/solution/Deployment/workflows/Steps/CD_SetResourceGroupHash.ps1 b/solution/Deployment/workflows/Steps/CD_SetResourceGroupHash.ps1 deleted file mode 100644 index 8947dee6..00000000 --- a/solution/Deployment/workflows/Steps/CD_SetResourceGroupHash.ps1 +++ /dev/null @@ -1,37 +0,0 @@ - - -function Get-UniqueString ([string]$id, $length=13) -{ - $hashArray = (new-object System.Security.Cryptography.SHA512Managed).ComputeHash($id.ToCharArray()) - -join ($hashArray[1..$length] | ForEach-Object { [char]($_ % 26 + [byte][char]'a') }) -} - -Write-Debug "Creating RG Hash" 
-$ResourceGroupHash = Get-UniqueString ($id=$env:AdsOpts_CD_ResourceGroup_Name+$env:AdsOpts_CD_ResourceGroup_TenantId) #Resource Group Name + TenantId to make hash more unique -Write-Debug $ResourceGroupHash - -PersistEnvVariable -Name "AdsOpts_CD_ResourceGroup_Hash" -Value $ResourceGroupHash -Write-Debug "Created RG Hash" -Write-Debug "Setting Service Names" -SetServiceName -RootElement "AdsOpts_CD_Services_AzureSQLServer" -SetServiceName -RootElement "AdsOpts_CD_Services_CoreFunctionApp" -SetServiceName -RootElement "AdsOpts_CD_Services_WebSite" -SetServiceName -RootElement "AdsOpts_CD_Services_AppInsights" -SetServiceName -RootElement "AdsOpts_CD_Services_Storage_Logging" -SetServiceName -RootElement "AdsOpts_CD_Services_Storage_ADLS" -SetServiceName -RootElement "AdsOpts_CD_Services_Storage_ADLSTransient" #Added for Transient Storage -SetServiceName -RootElement "AdsOpts_CD_Services_Storage_Blob" -SetServiceName -RootElement "AdsOpts_CD_Services_DataFactory" -SetServiceName -RootElement "AdsOpts_CD_Services_AppPlans_WebApp" -SetServiceName -RootElement "AdsOpts_CD_Services_AppPlans_FunctionApp" -SetServiceName -RootElement "AdsOpts_CD_Services_LogAnalytics" -SetServiceName -RootElement "AdsOpts_CD_Services_KeyVault" -SetServiceName -RootElement "AdsOpts_CD_ServicePrincipals_DeploymentSP" -SetServiceName -RootElement "AdsOpts_CD_ServicePrincipals_WebAppAuthenticationSP" -SetServiceName -RootElement "AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP" - -SetServiceName -RootElement "AdsOpts_CD_Services_Vnet" #Added for enabling vNet Integration -SetServiceName -RootElement "AdsOpts_CD_Services_Bastion" #Added for enabling vNet Integration - - - diff --git a/solution/Deployment/workflows/Steps/CI_BuildAdsGoFastDatabase.ps1 b/solution/Deployment/workflows/Steps/CI_BuildAdsGoFastDatabase.ps1 deleted file mode 100644 index 13c9815e..00000000 --- a/solution/Deployment/workflows/Steps/CI_BuildAdsGoFastDatabase.ps1 +++ /dev/null @@ -1,14 +0,0 @@ -#Move From 
Workflows to Function App -$CurrentPath = (Get-Location).Path -Set-Location "..\..\Database\ADSGoFastDbUp\AdsGoFastDbUp\" -dotnet restore -dotnet build --configuration Release --output ".\..\..\..\Deployment\bin\publish\unzipped\database\" -#Move back to workflows -Set-Location $CurrentPath -Set-Location "../bin/publish" -$Path = (Get-Location).Path + "/zipped/database" -New-Item -ItemType Directory -Force -Path $Path -$Path = $Path + "/Publish.zip" -Compress-Archive -Path '.\unzipped\database\*' -DestinationPath $Path -force -#Move back to workflows -Set-Location $CurrentPath \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CI_BuildDataFactory.ps1 b/solution/Deployment/workflows/Steps/CI_BuildDataFactory.ps1 deleted file mode 100644 index f321d56b..00000000 --- a/solution/Deployment/workflows/Steps/CI_BuildDataFactory.ps1 +++ /dev/null @@ -1,22 +0,0 @@ -#Move From Workflows to Function App -$CurrentPath = (Get-Location).Path -Set-Location "..\..\DataFactory\ADF\" -if (-Not (Test-Path "..\..\Deployment\bin\publish\unzipped\datafactory\")) -{ - New-Item -ItemType Directory -Force -Path "..\..\Deployment\bin\publish\unzipped\datafactory\" -} - -Get-ChildItem -Path "..\..\Deployment\bin\publish\unzipped\datafactory\" | Remove-Item -Force -Recurse - -Copy-Item -Path ".\*" -Destination "..\..\Deployment\bin\publish\unzipped\datafactory\" -PassThru -Force -Recurse -#Move back to workflows -Set-Location $CurrentPath -Set-Location "..\bin\publish" -$Path = (Get-Location).Path + "\zipped\datafactory" -New-Item -ItemType Directory -Force -Path $Path -$Path = $Path + "\Publish.zip" -Compress-Archive -Path '.\unzipped\datafactory\*' -DestinationPath $Path -force -#Move back to workflows -Set-Location $CurrentPath - - diff --git a/solution/Deployment/workflows/Steps/CI_BuildFunctionApp.ps1 b/solution/Deployment/workflows/Steps/CI_BuildFunctionApp.ps1 deleted file mode 100644 index aaf57798..00000000 --- 
a/solution/Deployment/workflows/Steps/CI_BuildFunctionApp.ps1 +++ /dev/null @@ -1,14 +0,0 @@ -#Move From Workflows to Function App -$CurrentPath = (Get-Location).Path -Set-Location "..\..\FunctionApp" -dotnet restore -dotnet publish --no-restore --configuration Release --output '..\Deployment\bin\publish\unzipped\functionapp\' -#Move back to workflows -Set-Location $CurrentPath -Set-Location "../bin/publish" -$Path = (Get-Location).Path + "/zipped/functionapp" -New-Item -ItemType Directory -Force -Path $Path -$Path = $Path + "/Publish.zip" -Compress-Archive -Path '.\unzipped\functionapp\*' -DestinationPath $Path -force -#Move back to workflows -Set-Location $CurrentPath \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/CI_BuildWebApp.ps1 b/solution/Deployment/workflows/Steps/CI_BuildWebApp.ps1 deleted file mode 100644 index a2195d25..00000000 --- a/solution/Deployment/workflows/Steps/CI_BuildWebApp.ps1 +++ /dev/null @@ -1,14 +0,0 @@ -#Move From Workflows to Function App -$CurrentPath = (Get-Location).Path -Set-Location "..\..\WebApplication" -dotnet restore -dotnet publish --no-restore --configuration Release --output '..\Deployment\bin\publish\unzipped\webapplication\' -#Move back to workflows -Set-Location $CurrentPath -Set-Location "../bin/publish" -$Path = (Get-Location).Path + "/zipped/webapplication" -New-Item -ItemType Directory -Force -Path $Path -$Path = $Path + "/Publish.zip" -Compress-Archive -Path '.\unzipped\webapplication\*' -DestinationPath $Path -force -#Move back to workflows -Set-Location $CurrentPath \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/InstallGatewayFunctions.ps1 b/solution/Deployment/workflows/Steps/InstallGatewayFunctions.ps1 deleted file mode 100644 index 135b11e5..00000000 --- a/solution/Deployment/workflows/Steps/InstallGatewayFunctions.ps1 +++ /dev/null @@ -1,107 +0,0 @@ -#### Here is the usage doc: -#### PS D:\GitHub> .\InstallGatewayOnLocalMachine.ps1 
E:\shared\bugbash\IntegrationRuntime.msi -#### - -param([string]$path, [string]$authKey) -function Install-Gateway([string] $gwPath) -{ - # uninstall any existing gateway - UnInstall-Gateway - - Write-Debug " Start Gateway installation" - - Start-Process "msiexec.exe" "/i $path /quiet /passive" -Wait - Start-Sleep -Seconds 30 - - Write-Debug " Succeed to install gateway" -} - -function Register-Gateway([string] $key) -{ - Write-Debug " Start to register gateway with key: $key" - $cmd = Get-CmdFilePath - Start-Process $cmd "-k $key" -Wait - Write-Debug " Succeed to register gateway" - -} - -function Check-WhetherGatewayInstalled([string]$name) -{ - $installedSoftwares = Get-ChildItem "hklm:\SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall" - foreach ($installedSoftware in $installedSoftwares) - { - $displayName = $installedSoftware.GetValue("DisplayName") - if($DisplayName -eq "$name Preview" -or $DisplayName -eq "$name") - { - return $true - } - } - - return $false -} - - -function UnInstall-Gateway() -{ - $installed = $false - if (Check-WhetherGatewayInstalled("Microsoft Integration Runtime")) - { - [void](Get-WmiObject -Class Win32_Product -Filter "Name='Microsoft Integration Runtime Preview' or Name='Microsoft Integration Runtime'" -ComputerName $env:COMPUTERNAME).Uninstall() - $installed = $true - } - - if (Check-WhetherGatewayInstalled("Microsoft Integration Runtime")) - { - [void](Get-WmiObject -Class Win32_Product -Filter "Name='Microsoft Integration Runtime Preview' or Name='Microsoft Integration Runtime'" -ComputerName $env:COMPUTERNAME).Uninstall() - $installed = $true - } - - if ($installed -eq $false) - { - Write-Debug " Microsoft Integration Runtime Preview is not installed." - return - } - - Write-Debug " Microsoft Integration Runtime has been uninstalled from this machine." 
-} - -function Get-CmdFilePath() -{ - $filePath = Get-ItemPropertyValue "hklm:\Software\Microsoft\DataTransfer\DataManagementGateway\ConfigurationManager" "DiacmdPath" - if ([string]::IsNullOrEmpty($filePath)) - { - throw "Get-InstalledFilePath: Cannot find installed File Path" - } - - return $filePath -} - -function Validate-Input([string]$path, [string]$key) -{ - if ([string]::IsNullOrEmpty($path)) - { - throw "Gateway path is not specified" - } - - if (!(Test-Path -Path $path)) - { - throw "Invalid gateway path: $path" - } - - if ([string]::IsNullOrEmpty($key)) - { - throw "Gateway Auth key is empty" - } -} - -If (-NOT ([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(` - [Security.Principal.WindowsBuiltInRole] "Administrator")) -{ - Write-Warning "You do not have Administrator rights to run this script!`nPlease re-run this script as an Administrator!" - Break -} - -Validate-Input $path $authKey - -Install-Gateway $path -Register-Gateway $authKey \ No newline at end of file diff --git a/solution/Deployment/workflows/Steps/PushEnvFileIntoVariables.ps1 b/solution/Deployment/workflows/Steps/PushEnvFileIntoVariables.ps1 deleted file mode 100644 index 300cf9a2..00000000 --- a/solution/Deployment/workflows/Steps/PushEnvFileIntoVariables.ps1 +++ /dev/null @@ -1,100 +0,0 @@ - -function SetServiceName($RootElement) -{ - $PostFixHash = [Environment]::GetEnvironmentVariable(($RootElement + "_ApplyNamePostFix")) - $Value = [Environment]::GetEnvironmentVariable(($RootElement + "_Name")) - if ($PostFixHash -eq "True") - { - $Value = $Value + $env:AdsOpts_CD_ResourceGroup_Hash - } - PersistEnvVariable -Name ($RootElement + "_Name") -Value $Value -} - -function PersistEnvVariable($Name, $Value) -{ - Write-Debug "Writing $Name to env file" - echo "$Name=$Value" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append - #Also Push Variables to the Session Env Variables for local testing - 
[Environment]::SetEnvironmentVariable($Name, "$Value") - -} - - -function ParseEnvFragment([object]$Json, [string]$NamePrefix) -{ - #$Json - foreach($p in ($Json.psobject.properties.where({$_.MemberType -eq "NoteProperty"}))) - { - $Name = $p.Name - Write-Debug "Parsing $($Name)" - if($NamePrefix -ne "") - { - Write-Debug "Prefix is $NamePrefix" - $Name = $NamePrefix + "_" + $p.Name - } - $Value = $p.Value - if($p.TypeNameOfValue -ne "System.Management.Automation.PSCustomObject") - { - - PersistEnvVariable -Name $Name -Value $Value - - ##Push Variables to the GitHub Actions Compatible Store - #Write-Debug "Writing $Name to env file" - #Write-Debug $p.TypeNameOfValue - #echo "$Name=$Value" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append - ##Also Push Variables to the Session Env Variables for local testing - #[Environment]::SetEnvironmentVariable($Name, "$Value") - } - else { - Write-Debug "Further Parsing of $Name required" - $JsonString = $p.Value #| ConvertTo-Json - ParseEnvFragment -Json $JsonString -NamePrefix $Name - } - } -} - - -function ParseEnvFile ($EnvFile) -{ - if ($null -eq $Env:GITHUB_ENV) - { - [Environment]::SetEnvironmentVariable("GITHUB_ENV","..\bin\GitEnv.txt") - $FileNameOnly = Split-Path $env:GITHUB_ENV -leaf - $PathOnly = Split-Path $env:GITHUB_ENV - if ((Test-Path $env:GITHUB_ENV)) - { - # Remove-Item -Path $env:GITHUB_ENV - } - else - { - - New-Item -Path $PathOnly -Name $FileNameOnly -type "file" -value "" - } - - } - - $Json = Get-Content -Path "..\environments\$($EnvFile).json" | ConvertFrom-Json - #$JsonForSchemaValidation = $Json | ConvertTo-Json -Depth 100 - #$schema = Get-Content "..\environments\environment.schema.json" - #$schema = ($schema | ConvertFrom-Json | ConvertTo-Json -depth 100) - #$SchemaValidation = $JsonForSchemaValidation | Test-Json -Schema $schema - #if($SchemaValidation -eq $false) - #{ - # Write-Warning "..\environments\$($EnvFile).json does not adhere to environment.schema.json!" 
- #} - - ParseEnvFragment -Json $Json -NamePrefix "" - -} - -function ParseSecretsFile () -{ - $path = "..\bin\Secrets.json" - $test = (Test-Path -Path $path) - if($test -eq $true) - { - $Json = Get-Content -Path $path | ConvertFrom-Json - ParseEnvFragment -Json $Json -NamePrefix "secrets" - } - -} \ No newline at end of file diff --git a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 index 9da26f78..17089531 100644 --- a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 +++ b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 @@ -31,4 +31,15 @@ az ad app delete --id $tout.aad_funcreg_id #Remove Terraform State and Backend Files rm ./terraform.tfstate -rm ./backend.tf \ No newline at end of file +rm ./backend.tf + +<# +If you want to delete all ADS Go Fast App Registrations owned by you.. RUN AT YOUR OWN RISK!!!: + +$apps = (az ad sp list --show-mine | ConvertFrom-Json | Where-Object {$_.displayName.StartsWith("ADS GoFast")}).id +foreach($app in $apps) {az ad sp delete --id $app} + + + + + #> \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 67069e8f..f6b734a6 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -17,11 +17,41 @@ $Environment = $Environment.ToLower() #First Convert Terraform Commons to YAML #Install-Module powershell-yaml -Force $GithubEnvTemplate = "" +$GithubEnvTemplateSensitive = "" + +#Preprocessing common_vars_template.jsonnet +#Feature Templates +$cvjns = Get-Content "./common_vars_template.jsonnet" -raw +$cvjnss = $cvjns.Split("/*DONOTREMOVETHISCOMMENT:SOFTS*/") +$fts = (Get-ChildItem -Path ./../featuretemplates | Select-Object -Property Name).Name.replace(".jsonc","") +$str = "/*DONOTREMOVETHISCOMMENT:SOFTS*/" + [System.Environment]::NewLine +foreach($ft in $fts) +{ + $str = $str + " 
" + "'$ft' : import './../featuretemplates/$ft.jsonc'," + [System.Environment]::NewLine +} +$str = $str + "/*DONOTREMOVETHISCOMMENT:SOFTS*/" +($cvjnss[0] + $Str + $cvjnss[2]) | Set-Content "./common_vars_template.jsonnet" +#Environments +$cvjns = Get-Content "./common_vars_template.jsonnet" -raw +$cvjnss = $cvjns.Split("/*DONOTREMOVETHISCOMMENT:ENVS*/") +$fts = (Get-ChildItem -Directory | Select-Object -Property Name).Name +$str = "/*DONOTREMOVETHISCOMMENT:ENVS*/" + [System.Environment]::NewLine +foreach($ft in $fts) +{ + $str = $str + " " + "'$ft' : import './$ft/common_vars_values.jsonc'," + [System.Environment]::NewLine +} +$str = $str + "/*DONOTREMOVETHISCOMMENT:ENVS*/" +($cvjnss[0] + $Str + $cvjnss[2]) | Set-Content "./common_vars_template.jsonnet" + Write-Host "Preparing Environment: $Environment Using $FeatureTemplate Template" -(jsonnet "./common_vars_template.jsonnet" --tla-str featuretemplatename=$FeatureTemplate --tla-str environment=$Environment ) | Set-Content("./$Environment/common_vars.json") -$obj = Get-Content ./$Environment/common_vars.json | ConvertFrom-Json +#Prep Output Folder +$newfolder = "./../../bin/environments/$Environment/" +$hiddenoutput = !(Test-Path $newfolder) ? 
($F = New-Item -itemType Directory -Name $newfolder) : ($F = "") + +(jsonnet "./common_vars_template.jsonnet" --tla-str featuretemplatename=$FeatureTemplate --tla-str environment=$Environment ) | Set-Content($newfolder +"/common_vars.json") +$obj = Get-Content ($newfolder + "/common_vars.json") | ConvertFrom-Json foreach($t in ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"})) { @@ -42,15 +72,24 @@ foreach($t in ($obj.ForSecretFile | Get-Member | Where-Object {$_.MemberType -eq { $Name = $t.Name $Value = $obj.ForSecretFile[0].$Name - #Add to GitHubSecretFile + #Add to GitHubSecretFile $GithubEnvTemplate = $GithubEnvTemplate + "$Name=$Value" + [System.Environment]::NewLine } +foreach($t in ($obj.ForSecretFileSensitive | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"})) +{ + $Name = $t.Name + $Value = $obj.ForSecretFile[0].$Name + #Add to GitHubSecretFile + $GithubEnvTemplateSensitive = $GithubEnvTemplateSensitive + "$Name=$Value" + [System.Environment]::NewLine +} + + #Write the Terraform Element common_vars_for_hcl.json - this is then injected into the hcl file -($obj.ForHCL | ConvertTo-Json -Depth 10) | Set-Content ./$Environment/common_vars_for_hcl.json +($obj.ForHCL | ConvertTo-Json -Depth 10) | Set-Content ($newfolder+"/common_vars_for_hcl.json") if($gitDeploy -eq $false) { #Write the Git Secrets to the Git Template .env - $GithubEnvTemplate|Set-Content ./$Environment/GetSecretsTemplate.env + ($GithubEnvTemplateSensitive + [System.Environment]::NewLine + $GithubEnvTemplate)|Set-Content ($newfolder+"/GetSecretsTemplate.env") } \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/admz/common_vars.yaml b/solution/DeploymentV2/environments/vars/admz/common_vars.yaml deleted file mode 100644 index bb9d67f9..00000000 --- a/solution/DeploymentV2/environments/vars/admz/common_vars.yaml +++ /dev/null @@ -1,10 +0,0 @@ -resource_group_name: gft2 # Theresourcegroupallresourceswillbedeployedto 
-tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 # ThisistheAzureADtenantID -prefix: ads # Allazureresourceswillbeprefixedwiththis -domain: microsoft.com # UsedwhenconfiguringAADconfigforAzurefunctions -subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 # Theazuresubscriptionidtodeployto -resource_location: AustraliaEast # Thelocationoftheresources -owner_tag: Contoso # OwnertagvalueforAzureresources -environment_tag: stg # ThisisusedonAzuretagsaswellasallresourcenames -ip_address: 144.138.148.220 # Thisistheipaddressoftheagent/currentIP.Usedtocreatefirewallexemptions. -deployment_principal_layers1and3: "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 37f3e3f2..1b2b715f 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -1,15 +1,20 @@ function (featuretemplatename="full_deployment",environment="staging") local locals = { - "staging": import './staging/common_vars_values.jsonc', - "production": './production/common_vars_values.jsonc', - "local": './local/common_vars_values.jsonc', - "admz": './admz/common_vars_values.jsonc', +/*DONOTREMOVETHISCOMMENT:ENVS*/ + 'admz' : import './admz/common_vars_values.jsonc', + 'local' : import './local/common_vars_values.jsonc', + 'production' : import './production/common_vars_values.jsonc', + 'staging' : import './staging/common_vars_values.jsonc', +/*DONOTREMOVETHISCOMMENT:ENVS*/ }; local featuretemplates = { - "basic_deployment" : import './../featuretemplates/basic_deployment.jsonc', - "full_deployment" : import './../featuretemplates/full_deployment.jsonc', - "functional_tests" : import './../featuretemplates/functional_tests.jsonc', +/*DONOTREMOVETHISCOMMENT:SOFTS*/ + 'basic_deployment' : import './../featuretemplates/basic_deployment.jsonc', + 
'full_deployment_no_purview' : import './../featuretemplates/full_deployment_no_purview.jsonc', + 'full_deployment' : import './../featuretemplates/full_deployment.jsonc', + 'functional_tests' : import './../featuretemplates/functional_tests.jsonc', +/*DONOTREMOVETHISCOMMENT:SOFTS*/ }; local featuretemplate = [ // Object comprehension. @@ -37,25 +42,29 @@ local AllVariables = [ "CICDSecretName": "WEB_APP_ADMIN_USER", "EnvVarName": "WEB_APP_ADMIN_USER", "HCLName": "", - "Value": locals[environment].WEB_APP_ADMIN_USER + "Value": locals[environment].WEB_APP_ADMIN_USER, + "Sensitive": false }, { "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", "HCLName": "", - "Value": locals[environment].ARM_SYNAPSE_WORKSPACE_NAME + "Value": locals[environment].ARM_SYNAPSE_WORKSPACE_NAME, + "Sensitive": false }, { "CICDSecretName": "ARM_KEYVAULT_NAME", "EnvVarName": "keyVaultName", "HCLName": "", - "Value": locals[environment].ARM_KEYVAULT_NAME + "Value": locals[environment].ARM_KEYVAULT_NAME, + "Sensitive": false }, { "CICDSecretName": "ARM_DATALAKE_NAME", "EnvVarName": "datalakeName", "HCLName": "", - "Value": locals[environment].ARM_DATALAKE_NAME + "Value": locals[environment].ARM_DATALAKE_NAME, + "Sensitive": false }, /* Required for Automated CICD Deployment @@ -64,37 +73,43 @@ local AllVariables = [ "CICDSecretName": "ARM_CLIENT_ID", "EnvVarName": "ARM_CLIENT_ID", "HCLName": "", - "Value": "#####" + "Value": "#####", + "Sensitive": true }, { "CICDSecretName": "ARM_PAL_PARTNER_ID", "EnvVarName": "ARM_PAL_PARTNER_ID", "HCLName": "", - "Value": "#####" + "Value": locals[environment].ARM_PAL_PARTNER_ID, + "Sensitive": false }, { "CICDSecretName": "ARM_CLIENT_SECRET", "EnvVarName": "ARM_CLIENT_SECRET", "HCLName": "", - "Value": "#####" + "Value": "#####", + "Sensitive": true }, { "CICDSecretName": "ARM_SUBSCRIPTION_ID", "EnvVarName": "ARM_SUBSCRIPTION_ID", "HCLName": "", - "Value": "#####" + "Value": locals[environment].subscription_id, + 
"Sensitive": false }, { "CICDSecretName": "ARM_TENANT_ID", "EnvVarName": "ARM_TENANT_ID", "HCLName": "tenant_id", - "Value": locals[environment].tenant_id + "Value": locals[environment].tenant_id, + "Sensitive": false }, { "CICDSecretName": "", "EnvVarName": "TF_VAR_tenant_id", "HCLName": "", - "Value": locals[environment].tenant_id + "Value": locals[environment].tenant_id, + "Sensitive": false }, /* @@ -104,103 +119,120 @@ local AllVariables = [ "CICDSecretName": "", "EnvVarName": "", "HCLName": "owner_tag", - "Value": locals[environment].owner_tag + "Value": locals[environment].owner_tag, + "Sensitive": false }, { "CICDSecretName": "", "EnvVarName": "", "HCLName": "deployment_principal_layers1and3", - "Value": locals[environment].deployment_principal_layers1and3 + "Value": locals[environment].deployment_principal_layers1and3, + "Sensitive": false }, { "CICDSecretName": "", "EnvVarName": "", "HCLName": "resource_location", - "Value": locals[environment].resource_location + "Value": locals[environment].resource_location, + "Sensitive": false }, { "CICDSecretName": "ENVIRONMENT_TAG", "EnvVarName": "TF_VAR_environment_tag", "HCLName": "environment_tag", - "Value": locals[environment].environment_tag + "Value": locals[environment].environment_tag, + "Sensitive": false }, { "CICDSecretName": "ARM_DOMAIN", "EnvVarName": "TF_VAR_domain", "HCLName": "domain", - "Value": locals[environment].domain + "Value": locals[environment].domain, + "Sensitive": false }, { "CICDSecretName": "", "EnvVarName": "TF_VAR_subscription_id", "HCLName": "subscription_id", - "Value": locals[environment].subscription_id + "Value": locals[environment].subscription_id, + "Sensitive": false }, { "CICDSecretName": "", "EnvVarName": "", "HCLName": "prefix", - "Value": locals[environment].prefix + "Value": locals[environment].prefix, + "Sensitive": false }, { "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", "EnvVarName": "TF_VAR_resource_group_name", "HCLName": "resource_group_name", - "Value": 
locals[environment].resource_group_name + "Value": locals[environment].resource_group_name, + "Sensitive": false }, { "CICDSecretName": "ARM_IP_ADDRESS", "EnvVarName": "TF_VAR_ip_address", "HCLName": "ip_address", - "Value": locals[environment].ip_address + "Value": locals[environment].ip_address, + "Sensitive": false }, { "CICDSecretName": "ARM_IP_ADDRESS2", "EnvVarName": "TF_VAR_ip_address2", "HCLName": "ip_address2", - "Value": locals[environment].ip_address2 + "Value": locals[environment].ip_address2, + "Sensitive": false }, { "CICDSecretName": "", "EnvVarName": "", "HCLName": "synapse_administrators", - "Value": locals[environment].synapse_administrators + "Value": locals[environment].synapse_administrators, + "Sensitive": false }, { "CICDSecretName": "", "EnvVarName": "", "HCLName": "resource_owners", - "Value": locals[environment].resource_owners + "Value": locals[environment].resource_owners, + "Sensitive": false }, { "CICDSecretName": "ARM_FEATURE_TEMPLATE", - "EnvVarName": "", + "EnvVarName": "ARM_FEATURE_TEMPLATE", "HCLName": "", - "Value": "#####" + "Value": featuretemplatename, + "Sensitive": false }, { "CICDSecretName": "ARM_STORAGE_NAME", "EnvVarName": "TF_VAR_state_storage_account_name", "HCLName": "", - "Value": "#####" + "Value": locals[environment].resource_group_name + "state", + "Sensitive": false }, { "CICDSecretName": "ARM_SYNAPSE_PASSWORD", "EnvVarName": "TF_VAR_synapse_sql_password", "HCLName": "", - "Value": "#####" + "Value": "#####", + "Sensitive": true }, { "CICDSecretName": "ARM_JUMPHOST_PASSWORD", "EnvVarName": "TF_VAR_jumphost_password", "HCLName": "", - "Value": "#####" + "Value": "#####", + "Sensitive": true }, { "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", "EnvVarName": "TF_VAR_web_app_admin_security_group", "HCLName": "", - "Value": "#####" + "Value": locals[environment].WEB_APP_ADMIN_SECURITY_GROUP, + "Sensitive": false }, /* Git Integration Set-Up @@ -209,61 +241,71 @@ local AllVariables = [ "CICDSecretName": 
"GIT_REPOSITORY_NAME", "EnvVarName": "TF_VAR_synapse_git_repository_name", "HCLName": "", - "Value": "#####" + "Value": locals[environment].GIT_REPOSITORY_NAME, + "Sensitive": false }, { "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", "HCLName": "", - "Value": "#####" + "Value": locals[environment].GIT_SYNAPSE_REPOSITORY_BRANCH_NAME, + "Sensitive": false }, { "CICDSecretName": "GIT_PAT", "EnvVarName": "TF_VAR_synapse_git_pat", "HCLName": "", - "Value": "#####" + "Value": "#####", + "Sensitive": true }, { "CICDSecretName": "GIT_USER_NAME", "EnvVarName": "TF_VAR_synapse_git_user_name", "HCLName": "", - "Value": "#####" + "Value": locals[environment].GIT_USER_NAME, + "Sensitive": false }, { "CICDSecretName": "GIT_EMAIL_ADDRESS", "EnvVarName": "TF_VAR_synapse_git_email_address", "HCLName": "", - "Value": "#####" + "Value": locals[environment].GIT_EMAIL_ADDRESS, + "Sensitive": false }, { "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", "EnvVarName": "TF_VAR_adf_git_repository_name", "HCLName": "", - "Value": "#####" + "Value": locals[environment].GIT_ADF_REPOSITORY_NAME, + "Sensitive": false }, { "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", "EnvVarName": "TF_VAR_adf_git_repository_branch_name", "HCLName": "", - "Value": "#####" + "Value": locals[environment].GIT_ADF_REPOSITORY_BRANCH_NAME, + "Sensitive": false }, { "CICDSecretName": "GIT_ADF_PAT", "EnvVarName": "TF_VAR_adf_git_pat", "HCLName": "", - "Value": "#####" + "Value": "#####", + "Sensitive": true }, { "CICDSecretName": "GIT_ADF_USER_NAME", "EnvVarName": "TF_VAR_adf_git_user_name", "HCLName": "", - "Value": "#####" + "Value": locals[environment].GIT_ADF_USER_NAME, + "Sensitive": false }, { "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", "EnvVarName": "TF_VAR_adf_git_email_address", "HCLName": "", - "Value": "#####" + "Value": locals[environment].GIT_ADF_EMAIL_ADDRESS, + "Sensitive": false } ]+featuretemplate; @@ -284,10 +326,44 @@ local 
EnvironmentVariables = { // Object comprehension. local SecretFileVars = { // Object comprehension. [sd.CICDSecretName]: sd.Value for sd in AllVariables - if sd.CICDSecretName != "" + if sd.CICDSecretName != "" && sd.Sensitive == false }; + +local SecretFileSensitiveVars = { // Object comprehension. + [sd.CICDSecretName]: sd.Value + for sd in AllVariables + if sd.CICDSecretName != "" && sd.Sensitive == true +}; + { "ForHCL": HCLVariables, "ForEnvVar": EnvironmentVariables, - "ForSecretFile": SecretFileVars -} \ No newline at end of file + "ForSecretFile": SecretFileVars, + "ForSecretFileSensitive": SecretFileSensitiveVars +} + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/local/common_vars.yaml b/solution/DeploymentV2/environments/vars/local/common_vars.yaml deleted file mode 100644 index bb9d67f9..00000000 --- a/solution/DeploymentV2/environments/vars/local/common_vars.yaml +++ /dev/null @@ -1,10 +0,0 @@ -resource_group_name: gft2 # Theresourcegroupallresourceswillbedeployedto -tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 # ThisistheAzureADtenantID -prefix: ads # Allazureresourceswillbeprefixedwiththis -domain: microsoft.com # UsedwhenconfiguringAADconfigforAzurefunctions -subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 # Theazuresubscriptionidtodeployto -resource_location: AustraliaEast # Thelocationoftheresources -owner_tag: Contoso # OwnertagvalueforAzureresources -environment_tag: stg # ThisisusedonAzuretagsaswellasallresourcenames -ip_address: 144.138.148.220 # Thisistheipaddressoftheagent/currentIP.Usedtocreatefirewallexemptions. 
-deployment_principal_layers1and3: "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" diff --git a/solution/DeploymentV2/environments/vars/production/common_vars.yaml b/solution/DeploymentV2/environments/vars/production/common_vars.yaml deleted file mode 100644 index bb9d67f9..00000000 --- a/solution/DeploymentV2/environments/vars/production/common_vars.yaml +++ /dev/null @@ -1,10 +0,0 @@ -resource_group_name: gft2 # Theresourcegroupallresourceswillbedeployedto -tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 # ThisistheAzureADtenantID -prefix: ads # Allazureresourceswillbeprefixedwiththis -domain: microsoft.com # UsedwhenconfiguringAADconfigforAzurefunctions -subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 # Theazuresubscriptionidtodeployto -resource_location: AustraliaEast # Thelocationoftheresources -owner_tag: Contoso # OwnertagvalueforAzureresources -environment_tag: stg # ThisisusedonAzuretagsaswellasallresourcenames -ip_address: 144.138.148.220 # Thisistheipaddressoftheagent/currentIP.Usedtocreatefirewallexemptions. 
-deployment_principal_layers1and3: "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" diff --git a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env b/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env deleted file mode 100644 index 5349b4ed..00000000 --- a/solution/DeploymentV2/environments/vars/staging/GetSecretsTemplate.env +++ /dev/null @@ -1,30 +0,0 @@ -ARM_CLIENT_ID=##### -ARM_CLIENT_SECRET=##### -ARM_DATALAKE_NAME=adsstgdlsadsvrojadsl -ARM_DOMAIN=microsoft.com -ARM_FEATURE_TEMPLATE=##### -ARM_IP_ADDRESS=144.138.148.220 -ARM_IP_ADDRESS2=144.138.148.220 -ARM_JUMPHOST_PASSWORD=##### -ARM_KEYVAULT_NAME=ads-stg-kv-ads-vroj -ARM_PAL_PARTNER_ID=##### -ARM_RESOURCE_GROUP_NAME=gft3 -ARM_STORAGE_NAME=##### -ARM_SUBSCRIPTION_ID=##### -ARM_SYNAPSE_PASSWORD=##### -ARM_SYNAPSE_WORKSPACE_NAME=adsstgsynwadsvroj -ARM_TENANT_ID=72f988bf-86f1-41af-91ab-2d7cd011db47 -ENVIRONMENT_TAG=stg -GIT_ADF_EMAIL_ADDRESS=##### -GIT_ADF_PAT=##### -GIT_ADF_REPOSITORY_BRANCH_NAME=##### -GIT_ADF_REPOSITORY_NAME=##### -GIT_ADF_USER_NAME=##### -GIT_EMAIL_ADDRESS=##### -GIT_PAT=##### -GIT_REPOSITORY_NAME=##### -GIT_SYNAPSE_REPOSITORY_BRANCH_NAME=##### -GIT_USER_NAME=##### -WEB_APP_ADMIN_SECURITY_GROUP=##### -WEB_APP_ADMIN_USER=ccbdbba4-669c-48d6-86b8-75c9ab2ee578 - diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars.json b/solution/DeploymentV2/environments/vars/staging/common_vars.json deleted file mode 100644 index fcea6cff..00000000 --- a/solution/DeploymentV2/environments/vars/staging/common_vars.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "ForEnvVar": { - "ARM_CLIENT_ID": "#####", - "ARM_CLIENT_SECRET": "#####", - "ARM_PAL_PARTNER_ID": "#####", - "ARM_SUBSCRIPTION_ID": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsvroj", - "ARM_TENANT_ID": "72f988bf-86f1-41af-91ab-2d7cd011db47", - "TF_VAR_adf_git_email_address": "#####", - "TF_VAR_adf_git_pat": "#####", - "TF_VAR_adf_git_repository_branch_name": "#####", - 
"TF_VAR_adf_git_repository_name": "#####", - "TF_VAR_adf_git_user_name": "#####", - "TF_VAR_configure_networking": false, - "TF_VAR_deploy_app_service_plan": true, - "TF_VAR_deploy_custom_terraform": false, - "TF_VAR_deploy_data_factory": true, - "TF_VAR_deploy_function_app": true, - "TF_VAR_deploy_metadata_database": true, - "TF_VAR_deploy_purview": false, - "TF_VAR_deploy_selfhostedsql": false, - "TF_VAR_deploy_sentinel": true, - "TF_VAR_deploy_synapse": true, - "TF_VAR_deploy_web_app": true, - "TF_VAR_domain": "microsoft.com", - "TF_VAR_environment_tag": "stg", - "TF_VAR_ip_address": "144.138.148.220", - "TF_VAR_ip_address2": "144.138.148.220", - "TF_VAR_is_onprem_datafactory_ir_registered": false, - "TF_VAR_is_vnet_isolated": false, - "TF_VAR_jumphost_password": "#####", - "TF_VAR_publish_datafactory_pipelines": true, - "TF_VAR_publish_function_app": true, - "TF_VAR_publish_metadata_database": true, - "TF_VAR_publish_sample_files": true, - "TF_VAR_publish_sif_database": true, - "TF_VAR_publish_web_app": true, - "TF_VAR_publish_web_app_addcurrentuserasadmin": true, - "TF_VAR_resource_group_name": "gft3", - "TF_VAR_state_storage_account_name": "#####", - "TF_VAR_subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", - "TF_VAR_synapse_git_email_address": "#####", - "TF_VAR_synapse_git_pat": "#####", - "TF_VAR_synapse_git_repository_branch_name": "#####", - "TF_VAR_synapse_git_repository_name": "#####", - "TF_VAR_synapse_git_user_name": "#####", - "TF_VAR_synapse_sql_password": "#####", - "TF_VAR_tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", - "TF_VAR_web_app_admin_security_group": "#####", - "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "datalakeName": "adsstgdlsadsvrojadsl", - "keyVaultName": "ads-stg-kv-ads-vroj" - }, - "ForHCL": { - "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "domain": "microsoft.com", - "environment_tag": "stg", - "ip_address": "144.138.148.220", - "ip_address2": "144.138.148.220", - 
"owner_tag": "Contoso", - "prefix": "ads", - "resource_group_name": "gft3", - "resource_location": "AustraliaEast", - "resource_owners": [ - "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "4c732d19-4076-4a76-87f3-6fbfd77f007d" - ], - "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", - "synapse_administrators": { - "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d", - "deploy_user": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" - }, - "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47" - }, - "ForSecretFile": { - "ARM_CLIENT_ID": "#####", - "ARM_CLIENT_SECRET": "#####", - "ARM_DATALAKE_NAME": "adsstgdlsadsvrojadsl", - "ARM_DOMAIN": "microsoft.com", - "ARM_FEATURE_TEMPLATE": "#####", - "ARM_IP_ADDRESS": "144.138.148.220", - "ARM_IP_ADDRESS2": "144.138.148.220", - "ARM_JUMPHOST_PASSWORD": "#####", - "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-vroj", - "ARM_PAL_PARTNER_ID": "#####", - "ARM_RESOURCE_GROUP_NAME": "gft3", - "ARM_STORAGE_NAME": "#####", - "ARM_SUBSCRIPTION_ID": "#####", - "ARM_SYNAPSE_PASSWORD": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsvroj", - "ARM_TENANT_ID": "72f988bf-86f1-41af-91ab-2d7cd011db47", - "ENVIRONMENT_TAG": "stg", - "GIT_ADF_EMAIL_ADDRESS": "#####", - "GIT_ADF_PAT": "#####", - "GIT_ADF_REPOSITORY_BRANCH_NAME": "#####", - "GIT_ADF_REPOSITORY_NAME": "#####", - "GIT_ADF_USER_NAME": "#####", - "GIT_EMAIL_ADDRESS": "#####", - "GIT_PAT": "#####", - "GIT_REPOSITORY_NAME": "#####", - "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "#####", - "GIT_USER_NAME": "#####", - "WEB_APP_ADMIN_SECURITY_GROUP": "#####", - "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" - } -} diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json b/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json deleted file mode 100644 index 75527720..00000000 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_for_hcl.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "deployment_principal_layers1and3": 
"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "domain": "microsoft.com", - "environment_tag": "stg", - "ip_address": "144.138.148.220", - "ip_address2": "144.138.148.220", - "owner_tag": "Contoso", - "prefix": "ads", - "resource_group_name": "gft3", - "resource_location": "AustraliaEast", - "resource_owners": [ - "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "4c732d19-4076-4a76-87f3-6fbfd77f007d" - ], - "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", - "synapse_administrators": { - "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d", - "deploy_user": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" - }, - "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47" -} diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index ba5fc00c..455c069e 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -1,24 +1,47 @@ { + //Core "owner_tag": "Contoso", - "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_location": "AustraliaEast", "environment_tag": "stg", "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", - "resource_group_name": "gft3", + "resource_group_name": "gft4", "ip_address": "144.138.148.220", "ip_address2": "144.138.148.220", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", - "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsvroj", - "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-vroj", - "ARM_DATALAKE_NAME": "adsstgdlsadsvrojadsl", + + //Owners & User Access + "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_owners": [ "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "4c732d19-4076-4a76-87f3-6fbfd77f007d" ], - "synapse_administrators": {"deploy_user": 
"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d"}, + "synapse_administrators": { + //"deploy_user": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d" + }, "synapse_publishers": {}, - "synapse_contributors": {} + "synapse_contributors": {}, + "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "WEB_APP_ADMIN_SECURITY_GROUP": "#####", + //Post Layer 1 Reources + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsvroj", + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-vroj", + "ARM_DATALAKE_NAME": "adsstgdlsadsvrojadsl", + + //PAL + "ARM_PAL_PARTNER_ID": "0", + + //GIT + "GIT_REPOSITORY_NAME": "#####", + "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "#####", + "GIT_USER_NAME": "#####", + "GIT_EMAIL_ADDRESS": "#####", + "GIT_ADF_REPOSITORY_NAME": "#####", + "GIT_ADF_REPOSITORY_BRANCH_NAME": "#####", + "GIT_ADF_USER_NAME": "#####", + "GIT_ADF_EMAIL_ADDRESS": "#####" + + } diff --git a/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl index 5a332505..65ee1397 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/local/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../environments/vars/local/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/local/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl index 6f634c3e..7ea447ec 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/production/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../environments/vars/production/common_vars_for_hcl.json") + common_vars = 
jsondecode(file("../../../bin/environments/production/common_vars_for_hcl.json") } diff --git a/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl index 7964b99b..e03ce41c 100644 --- a/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer1/vars/staging/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../environments/vars/staging/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/staging/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer2/layer1.tf b/solution/DeploymentV2/terraform_layer2/layer1.tf index 7eef56af..7cb68c42 100644 --- a/solution/DeploymentV2/terraform_layer2/layer1.tf +++ b/solution/DeploymentV2/terraform_layer2/layer1.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer1.tfstate" - resource_group_name = "gft3" - storage_account_name = "gft3state" + resource_group_name = "gft4" + storage_account_name = "gft4state" } } diff --git a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl index e79bc119..cb5d834c 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../environments/vars/local/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/local/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl index 02fb6fae..532e8e17 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl @@ -1,5 +1,5 @@ 
locals { - common_vars = jsondecode(file("../../../environments/vars/production/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/production/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl index 5cde3485..5fc9eb64 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../environments/vars/staging/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/staging/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index 0467deaa..beccd25e 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer2.tfstate" - resource_group_name = "gft3" - storage_account_name = "gft3state" + resource_group_name = "gft4" + storage_account_name = "gft4state" } } diff --git a/solution/DeploymentV2/terraform_layer3/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer3/vars/local/terragrunt.hcl index db88118a..993d298f 100644 --- a/solution/DeploymentV2/terraform_layer3/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer3/vars/local/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../environments/vars/local/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/local/common_vars_for_hcl.json")) } generate "layer2.tf" { diff --git a/solution/DeploymentV2/terraform_layer3/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer3/vars/production/terragrunt.hcl index c5b5da40..956bec98 100644 --- 
a/solution/DeploymentV2/terraform_layer3/vars/production/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer3/vars/production/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../environments/vars/production/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/production/common_vars_for_hcl.json")) } generate "layer2.tf" { diff --git a/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl index b5152d8d..7c54a09c 100644 --- a/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer3/vars/staging/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../environments/vars/staging/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/staging/common_vars_for_hcl.json")) } generate "layer2.tf" { From d601fd1c11b2118b760af640e5b273f5482007f0 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 28 Jul 2022 16:29:27 +0800 Subject: [PATCH 056/151] CICD Test --- .github/workflows/continuous-delivery.yml | 2 +- .../environments/vars/admz/common_vars.yaml | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 solution/DeploymentV2/environments/vars/admz/common_vars.yaml diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index a75c3f48..61e52fdb 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -3,7 +3,7 @@ name: Continuous Delivery on: workflow_dispatch: push: - branches: main + branches: feature-1.0.4 jobs: deploy-to-env-one: diff --git a/solution/DeploymentV2/environments/vars/admz/common_vars.yaml b/solution/DeploymentV2/environments/vars/admz/common_vars.yaml new file mode 100644 index 00000000..bb9d67f9 --- /dev/null +++ 
b/solution/DeploymentV2/environments/vars/admz/common_vars.yaml @@ -0,0 +1,10 @@ +resource_group_name: gft2 # Theresourcegroupallresourceswillbedeployedto +tenant_id: 72f988bf-86f1-41af-91ab-2d7cd011db47 # ThisistheAzureADtenantID +prefix: ads # Allazureresourceswillbeprefixedwiththis +domain: microsoft.com # UsedwhenconfiguringAADconfigforAzurefunctions +subscription_id: 035a1364-f00d-48e2-b582-4fe125905ee3 # Theazuresubscriptionidtodeployto +resource_location: AustraliaEast # Thelocationoftheresources +owner_tag: Contoso # OwnertagvalueforAzureresources +environment_tag: stg # ThisisusedonAzuretagsaswellasallresourcenames +ip_address: 144.138.148.220 # Thisistheipaddressoftheagent/currentIP.Usedtocreatefirewallexemptions. +deployment_principal_layers1and3: "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" From 36d5e4ac487502b54ee9a1edb394fb7a24c625b1 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 28 Jul 2022 16:43:10 +0800 Subject: [PATCH 057/151] CICD Test --- solution/DeploymentV2/Deploy.ps1 | 18 ++++++++++-------- solution/DeploymentV2/Deploy_3_Infra1.ps1 | 8 ++++++++ 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 41f38aa3..a8b3d4b7 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -94,14 +94,16 @@ $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTe #------------------------------------------------------------------------------------------------------------ # Run Each SubModule #------------------------------------------------------------------------------------------------------------ -#Invoke-Expression ./Deploy_3_Infra1.ps1 -#Invoke-Expression ./Deploy_4_PrivateLinks.ps1 -#Invoke-Expression ./Deploy_5_WebApp.ps1 -#Invoke-Expression ./Deploy_6_FuncApp.ps1 -#Invoke-Expression ./Deploy_7_MetadataDB.ps1 -#Invoke-Expression ./Deploy_8_SQLLogins.ps1 -#Invoke-Expression ./Deploy_9_DataFactory.ps1 
-#Invoke-Expression ./Deploy_10_SampleFiles.ps1 +./Deploy_3_Infra1.ps1 -deploymentFolderPath $deploymentFolderPath -skipTerraformDeployment $skipTerraformDeployment -skipCustomTerraform $skipCustomTerraform + +Invoke-Expression ./Deploy_4_PrivateLinks.ps1 + +Invoke-Expression ./Deploy_5_WebApp.ps1 +Invoke-Expression ./Deploy_6_FuncApp.ps1 +Invoke-Expression ./Deploy_7_MetadataDB.ps1 +Invoke-Expression ./Deploy_8_SQLLogins.ps1 +Invoke-Expression ./Deploy_9_DataFactory.ps1 +Invoke-Expression ./Deploy_10_SampleFiles.ps1 #---------------------------------------------------------------------------------------------------------------- # Set up Purview diff --git a/solution/DeploymentV2/Deploy_3_Infra1.ps1 b/solution/DeploymentV2/Deploy_3_Infra1.ps1 index c97a3c4e..1279bf08 100644 --- a/solution/DeploymentV2/Deploy_3_Infra1.ps1 +++ b/solution/DeploymentV2/Deploy_3_Infra1.ps1 @@ -1,3 +1,11 @@ +param ( + [Parameter(Mandatory=$true)] + [String]$deploymentFolderPath, + [Parameter(Mandatory=$true)] + [bool]$skipCustomTerraform=$true, + [Parameter(Mandatory=$true)] + [bool]$skipTerraformDeployment=$true +) #------------------------------------------------------------------------------------------------------------ # Deploy the customisable terraform layer From d0099a3f34a4b1da634dd4f9f8176d8d1cb455bf Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 28 Jul 2022 18:52:00 +0800 Subject: [PATCH 058/151] Separated out SQL Related Publishing from agent deployment --- solution/DeploymentV2/Deploy.ps1 | 23 +++++++++++++++---- solution/DeploymentV2/Deploy_7_MetadataDB.ps1 | 6 ++++- solution/DeploymentV2/Deploy_8_SQLLogins.ps1 | 15 ++++++++---- .../vars/common_vars_template.jsonnet | 4 ++++ .../vars/staging/common_vars_values.jsonc | 16 ++++++++----- 5 files changed, 49 insertions(+), 15 deletions(-) diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index a8b3d4b7..7250b57f 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ 
b/solution/DeploymentV2/Deploy.ps1 @@ -28,7 +28,12 @@ param ( [Parameter(Mandatory=$false)] [bool]$RunTerraformLayer3=0, [Parameter(Mandatory=$false)] + [bool]$PublishMetadataDatabase=0, + [Parameter(Mandatory=$false)] + [bool]$PublishSQLLogins=0, + [Parameter(Mandatory=$false)] [string]$FeatureTemplate="basic_deployment" + ) #------------------------------------------------------------------------------------------------------------ # Preparation #Mandatory @@ -36,7 +41,8 @@ param ( $deploymentFolderPath = (Get-Location).Path $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') - +$ipaddress = $env:TF_VAR_ip_address +$ipaddress2 = $env:TF_VAR_ip_address2 ./Deploy_0_Prep.ps1 -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate #------------------------------------------------------------------------------------------------------------ @@ -97,11 +103,20 @@ $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTe ./Deploy_3_Infra1.ps1 -deploymentFolderPath $deploymentFolderPath -skipTerraformDeployment $skipTerraformDeployment -skipCustomTerraform $skipCustomTerraform Invoke-Expression ./Deploy_4_PrivateLinks.ps1 - Invoke-Expression ./Deploy_5_WebApp.ps1 Invoke-Expression ./Deploy_6_FuncApp.ps1 -Invoke-Expression ./Deploy_7_MetadataDB.ps1 -Invoke-Expression ./Deploy_8_SQLLogins.ps1 + +#------------------------------------------------------------------------------------------------------------ +# SQL Deployment and Users +# In order for a deployment agent service principal to execute the two scripts below you need to give directory read to the Azure SQL Instance Managed Identity and the Synapse Managed Identity +#------------------------------------------------------------------------------------------------------------ +./Deploy_7_MetadataDB.ps1 
-publish_metadata_database $PublishMetadataDatabase +./Deploy_8_SQLLogins.ps1 -PublishSQLLogins $PublishSQLLogins + +#------------------------------------------------------------------------------------------------------------ +# Data Factory & Synapse Artefacts and Samplefiles +#------------------------------------------------------------------------------------------------------------ + Invoke-Expression ./Deploy_9_DataFactory.ps1 Invoke-Expression ./Deploy_10_SampleFiles.ps1 diff --git a/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 b/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 index 6701da32..a1960985 100644 --- a/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 +++ b/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 @@ -1,7 +1,11 @@ +param ( + [Parameter(Mandatory=$false)] + [bool]$publish_metadata_database=$false +) #---------------------------------------------------------------------------------------------------------------- # Populate the Metadata Database #---------------------------------------------------------------------------------------------------------------- -if($skipDatabase) { +if($publish_metadata_database -eq $false) { Write-Host "Skipping Populating Metadata Database" } else { diff --git a/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 b/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 index 362c689f..ca2011c5 100644 --- a/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 +++ b/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 @@ -1,8 +1,11 @@ - +param ( + [Parameter(Mandatory=$false)] + [bool]$PublishSQLLogins=$false +) #---------------------------------------------------------------------------------------------------------------- # Configure SQL Server Logins #---------------------------------------------------------------------------------------------------------------- -if($skipSQLLogins) { +if($PublishSQLLogins -eq $false) { Write-Host "Skipping configuration of SQL Server Users" } else { @@ -52,15 +55,19 @@ else { 
#---------------------------------------------------------------------------------------------------------------- # Configure Synapse Logins #---------------------------------------------------------------------------------------------------------------- -if($skipSynapseLogins) { +if($PublishSQLLogins -eq $false) { Write-Host "Skipping configuration of Synapse SQL Users" } else { Write-Host "Configuring Synapse SQL Users" + $myIp = $env:TF_VAR_ip_address + $myIp2 = $env:TF_VAR_ip_address2 + #Add Ip to SQL Firewall #$result = az synapse workspace update -n $synapse_workspace_name -g $resource_group_name --set publicNetworkAccess="Enabled" - $result = az synapse workspace firewall-rule create --resource-group $resource_group_name --workspace-name $synapse_workspace_name --name "Deploy.ps1" --start-ip-address $myIp --end-ip-address $myIp + $result = az synapse workspace firewall-rule create --resource-group $resource_group_name --workspace-name $synapse_workspace_name --name "DeploymentAgent" --start-ip-address $myIp --end-ip-address $myIp + $result = az synapse workspace firewall-rule create --resource-group $resource_group_name --workspace-name $synapse_workspace_name --name "DeploymentUser" --start-ip-address $myIp2 --end-ip-address $myIp2 if ($tout.is_vnet_isolated -eq $false) { diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 1b2b715f..28d3924b 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -362,6 +362,10 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 455c069e..c1dc1939 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -14,8 +14,11 @@ //Owners & User Access "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_owners": [ - "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "4c732d19-4076-4a76-87f3-6fbfd77f007d" + //Deployment Agent - Note that the first element here will become the SQL server AAD Admin + "4c732d19-4076-4a76-87f3-6fbfd77f007d", + //Admin User - Jorampon + "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + ], "synapse_administrators": { //"deploy_user": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", @@ -25,10 +28,11 @@ "synapse_contributors": {}, "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "WEB_APP_ADMIN_SECURITY_GROUP": "#####", - //Post Layer 1 Reources - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsvroj", - "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-vroj", - "ARM_DATALAKE_NAME": "adsstgdlsadsvrojadsl", + + //Post Layer 1 Reources. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates. + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadslwra", + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-lwra", + "ARM_DATALAKE_NAME": "adsstgdlsadslwraadsl", //PAL "ARM_PAL_PARTNER_ID": "0", From a1b6eaa67792af3713a3a1d2f476d624c0b45fb4 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 28 Jul 2022 19:14:36 +0800 Subject: [PATCH 059/151] Fixed GatherOutputsFromTerraform. 
Invalid reference to old terraform folder --- .github/workflows/continuous-delivery.yml | 2 +- .../Patterns/GatherOutputsFromTerraform.psm1 | 2 +- solution/DeploymentV2/Deploy.ps1 | 19 +++++++++++++------ 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/continuous-delivery.yml index 61e52fdb..42205eab 100644 --- a/.github/workflows/continuous-delivery.yml +++ b/.github/workflows/continuous-delivery.yml @@ -111,7 +111,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 -RunTerraformLayer2 $true -FeatureTemplate ${{ env.featureTemplate}} + ./Deploy.ps1 -RunTerraformLayer2 $true -FeatureTemplate ${{ env.featureTemplate}} -PerformPostIACPublishing $true #PROD ENVIRONMENT diff --git a/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 b/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 index 08a5d2d2..dac9d0b2 100644 --- a/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 +++ b/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 @@ -4,7 +4,7 @@ function GatherOutputsFromTerraform() $myIp = (Invoke-WebRequest ifconfig.me/ip).Content $CurrentFolderPath = $PWD - Set-Location "../../DeploymentV2/terraform" + Set-Location "../../DeploymentV2/terraform_layer2" $env:TF_VAR_ip_address = $myIp #------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 7250b57f..3901dbc3 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -32,6 +32,8 @@ param ( [Parameter(Mandatory=$false)] [bool]$PublishSQLLogins=0, [Parameter(Mandatory=$false)] + [bool]$PerformPostIACPublishing=0, + [Parameter(Mandatory=$false)] [string]$FeatureTemplate="basic_deployment" ) @@ -102,10 +104,6 @@ $ipaddress2 = $env:TF_VAR_ip_address2 
#------------------------------------------------------------------------------------------------------------ ./Deploy_3_Infra1.ps1 -deploymentFolderPath $deploymentFolderPath -skipTerraformDeployment $skipTerraformDeployment -skipCustomTerraform $skipCustomTerraform -Invoke-Expression ./Deploy_4_PrivateLinks.ps1 -Invoke-Expression ./Deploy_5_WebApp.ps1 -Invoke-Expression ./Deploy_6_FuncApp.ps1 - #------------------------------------------------------------------------------------------------------------ # SQL Deployment and Users # In order for a deployment agent service principal to execute the two scripts below you need to give directory read to the Azure SQL Instance Managed Identity and the Synapse Managed Identity @@ -117,8 +115,17 @@ Invoke-Expression ./Deploy_6_FuncApp.ps1 # Data Factory & Synapse Artefacts and Samplefiles #------------------------------------------------------------------------------------------------------------ -Invoke-Expression ./Deploy_9_DataFactory.ps1 -Invoke-Expression ./Deploy_10_SampleFiles.ps1 +if($PerformPostIACPublishing -eq $false) { + Write-Host "Skipping Post IAC Publishing" +} +else { + Invoke-Expression ./Deploy_4_PrivateLinks.ps1 + Invoke-Expression ./Deploy_5_WebApp.ps1 + Invoke-Expression ./Deploy_6_FuncApp.ps1 + Invoke-Expression ./Deploy_9_DataFactory.ps1 + Invoke-Expression ./Deploy_10_SampleFiles.ps1 +} + #---------------------------------------------------------------------------------------------------------------- # Set up Purview From cebe5b9d3bd3470bdad5e659fb9da79ce8d6d09f Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 28 Jul 2022 19:40:38 +0800 Subject: [PATCH 060/151] modified: solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 modified: solution/Synapse/Patterns/GatherOutputsFromTerraform.psm1 --- solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 | 2 +- solution/Synapse/Patterns/GatherOutputsFromTerraform.psm1 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 b/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 index 8e61bf6b..b6427610 100644 --- a/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 +++ b/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 @@ -1,7 +1,7 @@ Set-Location $deploymentFolderPath Write-Host "Reading Terraform Outputs" -Set-Location "./terraform" +Set-Location "./terraform_layer2" Import-Module .\..\GatherOutputsFromTerraform.psm1 -force $tout = GatherOutputsFromTerraform Set-Location $deploymentFolderPath diff --git a/solution/Synapse/Patterns/GatherOutputsFromTerraform.psm1 b/solution/Synapse/Patterns/GatherOutputsFromTerraform.psm1 index 02e50246..50d57f9f 100644 --- a/solution/Synapse/Patterns/GatherOutputsFromTerraform.psm1 +++ b/solution/Synapse/Patterns/GatherOutputsFromTerraform.psm1 @@ -5,7 +5,7 @@ function GatherOutputsFromTerraform() $myIp = (Invoke-WebRequest ifconfig.me/ip).Content $CurrentFolderPath = $PWD - Set-Location "../../DeploymentV2/terraform" + Set-Location "../../DeploymentV2/terraform_layer2" $env:TF_VAR_ip_address = $myIp #------------------------------------------------------------------------------------------------------------ From 275e5a2bd31fee49b72738d5aa4acf8023a77206 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 29 Jul 2022 15:11:49 +0800 Subject: [PATCH 061/151] CICD Updates --- .../workflows/03.grant-aad-permissions.yml | 171 ++++++++++++++++++ 1 file changed, 171 insertions(+) create mode 100644 .github/workflows/03.grant-aad-permissions.yml diff --git a/.github/workflows/03.grant-aad-permissions.yml b/.github/workflows/03.grant-aad-permissions.yml new file mode 100644 index 00000000..a204568c --- /dev/null +++ b/.github/workflows/03.grant-aad-permissions.yml @@ -0,0 +1,171 @@ +name: Continuous Delivery + +on: + workflow_dispatch: + push: + branches: $default-branch + +jobs: + deploy-to-env-one: + name: Deploy to Environment One + concurrency: terraform + environment: + name: 
development + env: + # This determines the location of the .hcl file that will be used + environmentName: staging + gitDeploy : true + skipTerraformDeployment: false + featureTemplate: ${{ secrets.ARM_FEATURE_TEMPLATE }} + WEB_APP_ADMIN_USER: ${{ secrets.WEB_APP_ADMIN_USER }} + keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} + ARM_SYNAPSE_WORKSPACE_NAME: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} + datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} + # Required for Terraform + ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} + ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} + # Customizing Terraform vars + TF_VAR_ip_address2 : ${{ secrets.ARM_IP_ADDRESS2 }} + TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} + TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} + TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} + TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} + TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} + TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} + TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} + TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} + TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} + # GIT Integration set up + TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} + TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} + TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_adf_git_user_name : 
${{secrets.GIT_USER_NAME}} + TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + + #TF_LOG : TRACE + + runs-on: ubuntu-latest + steps: + - name: PrintInfo + run: | + echo "Deploying to Resource Group: ${{ env.TF_VAR_resource_group_name }} " + echo "echo Hcl file name: ${{ env.environmentName}} " + + - name: Checkout + uses: actions/checkout@v3.0.0 + + - name: Get public IP + id: ip + uses: haythem/public-ip@v1.2 + + - name: Login via Az module + uses: azure/login@v1 + with: + creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' + enable-AzPSSession: true + + - name: Open Firewalls for Agent + id: open_firewalls + continue-on-error: true + working-directory: ./solution/DeploymentV2/terraform + run: | + az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 + az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.ARM_SYNAPSE_WORKSPACE_NAME }} + az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} + + - name: Set PAL + id: set_pal + continue-on-error: true + run: | + az extension add --name managementpartner + az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} + + - name: Terragrunt Install + id: terragrunt_install + working-directory: ./solution/DeploymentV2/terraform + run: | + brew install terragrunt + + - name: Install Jsonnet + id: jsonnet-install + working-directory: ./solution/DeploymentV2/ + env: + TF_VAR_ip_address : 
${{steps.ip.outputs.ipv4}} + run: | + wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb + sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb + + - name: Deploy Solution + id: solution-deployment + working-directory: ./solution/DeploymentV2/ + shell: pwsh + env: + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + run: | + git update-index --chmod=+x ./Deploy.ps1 + ./Deploy.ps1 -RunTerraformLayer3 $true -FeatureTemplate ${{ env.featureTemplate}} -PerformPostIACPublishing $false + + + #PROD ENVIRONMENT + deploy-to-env-two: + name: Deploy to Environment Two + concurrency: terraform + needs: [deploy-to-env-one] + environment: + name: Prod + env: + environmentName: production + gitDeploy : true + skipTerraformDeployment: false + WEB_APP_ADMIN_USER: ${{ secrets.WEB_APP_ADMIN_USER }} + keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} + ARM_SYNAPSE_WORKSPACE_NAME: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} + datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} + # Required for Terraform + ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} + ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} + # Customizing Terraform vars + TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} + TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} + TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} + TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} + TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} + TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} + TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} + TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} + TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} + # GIT Integration set up + TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + 
TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} + TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} + TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + + #PROD ENVIRONMENT + #TF_LOG : TRACE + + runs-on: ubuntu-latest + steps: + + - name: Checkout + uses: actions/checkout@v3.0.0 + + - name: Get public IP + id: ip + uses: haythem/public-ip@v1.2 From 95af48ce0bdf52ca99fc475e559af2fc5240b2d1 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 29 Jul 2022 15:11:54 +0800 Subject: [PATCH 062/151] CICD Updates --- ...e.yml => 01.create-rg-and-aad-objects.yml} | 0 ...elivery.yml => 02.continuous-delivery.yml} | 0 solution/DeploymentV2/Cleanup_RemoveAll.ps1 | 4 +- solution/DeploymentV2/Deploy.ps1 | 109 ++++---- solution/DeploymentV2/Deploy_0_Prep.ps1 | 76 ----- .../DeploymentV2/Deploy_2_Infra0_Outputs.ps1 | 2 +- solution/DeploymentV2/Deploy_8_SQLLogins.ps1 | 31 ++- .../DeploymentV2/EnvironmentTemplate_admz.hcl | 85 ------ .../EnvironmentTemplate_local.hcl | 31 --- .../EnvironmentTemplate_staging.hcl | 50 ---- .../GenerateAndUploadADFPipelines.ps1 | 4 +- .../RemoteInstallIntegrationRuntime.ps1 | 6 +- .../DeploymentV2/RemoteInstallSQLWithCDC.ps1 | 6 +- .../vars/PreprocessEnvironment.ps1 | 8 +- .../environments/vars/common_vars_schema.json | 261 ++++++++++++++++++ .../vars/common_vars_template.jsonnet | 33 +++ .../vars/staging/common_vars_values.jsonc | 10 +- .../pwshmodules/Deploy_0_Prep.psm1 | 79 ++++++ .../GatherOutputsFromTerraform.psm1 | 6 +- solution/DeploymentV2/pwshmodules/Test.psm1 | 4 + .../terraform_layer2/.terraform.lock.hcl | 19 ++ 
.../terraform_layer2/database.ps1 | 28 ++ .../DeploymentV2/terraform_layer2/database.tf | 11 +- .../DeploymentV2/terraform_layer2/outputs.tf | 3 + .../DeploymentV2/terraform_layer2/vars.tf | 6 + .../vars/local/terragrunt.hcl | 1 + .../vars/production/terragrunt.hcl | 3 +- .../vars/staging/terragrunt.hcl | 3 +- 28 files changed, 562 insertions(+), 317 deletions(-) rename .github/workflows/{deploy-terraform-state-store.yml => 01.create-rg-and-aad-objects.yml} (100%) rename .github/workflows/{continuous-delivery.yml => 02.continuous-delivery.yml} (100%) delete mode 100644 solution/DeploymentV2/Deploy_0_Prep.ps1 delete mode 100644 solution/DeploymentV2/EnvironmentTemplate_admz.hcl delete mode 100644 solution/DeploymentV2/EnvironmentTemplate_local.hcl delete mode 100644 solution/DeploymentV2/EnvironmentTemplate_staging.hcl create mode 100644 solution/DeploymentV2/environments/vars/common_vars_schema.json create mode 100644 solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 rename solution/DeploymentV2/{ => pwshmodules}/GatherOutputsFromTerraform.psm1 (89%) create mode 100644 solution/DeploymentV2/pwshmodules/Test.psm1 create mode 100644 solution/DeploymentV2/terraform_layer2/database.ps1 diff --git a/.github/workflows/deploy-terraform-state-store.yml b/.github/workflows/01.create-rg-and-aad-objects.yml similarity index 100% rename from .github/workflows/deploy-terraform-state-store.yml rename to .github/workflows/01.create-rg-and-aad-objects.yml diff --git a/.github/workflows/continuous-delivery.yml b/.github/workflows/02.continuous-delivery.yml similarity index 100% rename from .github/workflows/continuous-delivery.yml rename to .github/workflows/02.continuous-delivery.yml diff --git a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 index 17089531..3df8decf 100644 --- a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 +++ b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 @@ -17,8 +17,8 @@ Set-Location ".\terraform" # Get all the 
outputs from terraform so we can use them in subsequent steps #------------------------------------------------------------------------------------------------------------ Write-Host "Reading Terraform Outputs" -Import-Module .\..\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform +Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force +$tout = GatherOutputsFromTerraform -TerraformFolderPath ./terraform_layer2 #Delete Resource Group az group delete --name $tout.resource_group_name diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 3901dbc3..604979e6 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -19,6 +19,11 @@ # - Run this script # # You can run this script multiple times if needed. +# +# ./Deploy.ps1 -RunTerraformLayer1 $true -FeatureTemplate "basic_deployment" -PerformPostIACPublishing $false +# ./Deploy.ps1 -RunTerraformLayer2 $true -FeatureTemplate "basic_deployment" -PerformPostIACPublishing $true +# ./Deploy.ps1 -RunTerraformLayer3 $true -FeatureTemplate "basic_deployment" -$PublishSQLLogins $true +# #---------------------------------------------------------------------------------------------------------------- param ( [Parameter(Mandatory=$false)] @@ -28,7 +33,7 @@ param ( [Parameter(Mandatory=$false)] [bool]$RunTerraformLayer3=0, [Parameter(Mandatory=$false)] - [bool]$PublishMetadataDatabase=0, + [bool]$PublishMetadataDatabase=$true, [Parameter(Mandatory=$false)] [bool]$PublishSQLLogins=0, [Parameter(Mandatory=$false)] @@ -37,6 +42,12 @@ param ( [string]$FeatureTemplate="basic_deployment" ) + +#------------------------------------------------------------------------------------------------------------ +# Module Imports #Mandatory +#------------------------------------------------------------------------------------------------------------ +import-Module ./pwshmodules/GatherOutputsFromTerraform.psm1 -force +import-Module 
./pwshmodules/Deploy_0_Prep.psm1 -force #------------------------------------------------------------------------------------------------------------ # Preparation #Mandatory #------------------------------------------------------------------------------------------------------------ @@ -45,7 +56,8 @@ $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'tr $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') $ipaddress = $env:TF_VAR_ip_address $ipaddress2 = $env:TF_VAR_ip_address2 -./Deploy_0_Prep.ps1 -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate + +PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate #------------------------------------------------------------------------------------------------------------ # Main Terraform @@ -56,47 +68,46 @@ $ipaddress2 = $env:TF_VAR_ip_address2 #------------------------------------------------------------------------------------------------------------ # Get all the outputs from terraform so we can use them in subsequent steps #Mandatory #------------------------------------------------------------------------------------------------------------ - Set-Location "./terraform_layer2" - Write-Host "Reading Terraform Outputs" - #Run Init Just in Case we skipped the Infra Section - #$init = terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure - Import-Module .\..\GatherOutputsFromTerraform.psm1 -force - $tout = GatherOutputsFromTerraform - $outputs = terragrunt output -json --terragrunt-config ./vars/$env:environmentName/terragrunt.hcl | ConvertFrom-Json - $subscription_id =$outputs.subscription_id.value - $resource_group_name =$outputs.resource_group_name.value - $webapp_name =$outputs.webapp_name.value - $functionapp_name=$outputs.functionapp_name.value - $purview_name=$outputs.purview_name.value - 
$sqlserver_name=$outputs.sqlserver_name.value - $blobstorage_name=$outputs.blobstorage_name.value - $adlsstorage_name=$outputs.adlsstorage_name.value - $datafactory_name=$outputs.datafactory_name.value - $keyvault_name=$outputs.keyvault_name.value - #sif database name - $sifdb_name = if([string]::IsNullOrEmpty($outputs.sifdb_name.value)){"SIFDM"} - $stagingdb_name=$outputs.stagingdb_name.value - $sampledb_name=$outputs.sampledb_name.value - $metadatadb_name=$outputs.metadatadb_name.value - $loganalyticsworkspace_id=$outputs.loganalyticsworkspace_id.value - $purview_sp_name=$outputs.purview_sp_name.value - $synapse_workspace_name=if([string]::IsNullOrEmpty($outputs.synapse_workspace_name.value)) {"Dummy"} else {$outputs.synapse_workspace_name.value} - $synapse_sql_pool_name=if([string]::IsNullOrEmpty($outputs.synapse_sql_pool_name.value)) {"Dummy"} else {$outputs.synapse_sql_pool_name.value} - $synapse_spark_pool_name=if([string]::IsNullOrEmpty($outputs.synapse_spark_pool_name.value)) {"Dummy"} else {$outputs.synapse_spark_pool_name.value} - $skipCustomTerraform = if($tout.deploy_custom_terraform) {$false} else {$true} - $skipWebApp = if($tout.publish_web_app -and $tout.deploy_web_app) {$false} else {$true} - $skipFunctionApp = if($tout.publish_function_app -and $tout.deploy_function_app) {$false} else {$true} - $skipDatabase = if($tout.publish_metadata_database -and $tout.deploy_metadata_database) {$false} else {$true} - $skipSQLLogins = if($tout.publish_sql_logins -and $tout.deploy_sql_server) {$false} else {$true} - $skipSynapseLogins = if($tout.publish_sql_logins -and $tout.deploy_synapse) {$false} else {$true} - $skipSampleFiles = if($tout.publish_sample_files){$false} else {$true} - $skipSIF= if($tout.publish_sif_database){$false} else {$true} - $skipNetworking = if($tout.configure_networking){$false} else {$true} - $skipDataFactoryPipelines = if($tout.publish_datafactory_pipelines) {$false} else {$true} - $skipFunctionalTests = 
if($tout.publish_functional_tests) {$false} else {$true} - $skipConfigurePurview = if($tout.publish_configure_purview) {$false} else {$true} - $AddCurrentUserAsWebAppAdmin = if($tout.publish_web_app_addcurrentuserasadmin) {$true} else {$false} - Set-Location $deploymentFolderPath +Set-Location "./terraform_layer2" +Write-Host "Reading Terraform Outputs" +#Run Init Just in Case we skipped the Infra Section +#$init = terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure +$tout = GatherOutputsFromTerraform -TerraformFolderPath "./" +$outputs = terragrunt output -json --terragrunt-config ./vars/$env:environmentName/terragrunt.hcl | ConvertFrom-Json +$subscription_id =$outputs.subscription_id.value +$resource_group_name =$outputs.resource_group_name.value +$webapp_name =$outputs.webapp_name.value +$functionapp_name=$outputs.functionapp_name.value +$purview_name=$outputs.purview_name.value +$sqlserver_name=$outputs.sqlserver_name.value +$blobstorage_name=$outputs.blobstorage_name.value +$adlsstorage_name=$outputs.adlsstorage_name.value +$datafactory_name=$outputs.datafactory_name.value +$keyvault_name=$outputs.keyvault_name.value +#sif database name +$sifdb_name = if([string]::IsNullOrEmpty($outputs.sifdb_name.value)){"SIFDM"} +$stagingdb_name=$outputs.stagingdb_name.value +$sampledb_name=$outputs.sampledb_name.value +$metadatadb_name=$outputs.metadatadb_name.value +$loganalyticsworkspace_id=$outputs.loganalyticsworkspace_id.value +$purview_sp_name=$outputs.purview_sp_name.value +$synapse_workspace_name=if([string]::IsNullOrEmpty($outputs.synapse_workspace_name.value)) {"Dummy"} else {$outputs.synapse_workspace_name.value} +$synapse_sql_pool_name=if([string]::IsNullOrEmpty($outputs.synapse_sql_pool_name.value)) {"Dummy"} else {$outputs.synapse_sql_pool_name.value} +$synapse_spark_pool_name=if([string]::IsNullOrEmpty($outputs.synapse_spark_pool_name.value)) {"Dummy"} else {$outputs.synapse_spark_pool_name.value} +$skipCustomTerraform = 
if($tout.deploy_custom_terraform) {$false} else {$true} +$skipWebApp = if($tout.publish_web_app -and $tout.deploy_web_app) {$false} else {$true} +$skipFunctionApp = if($tout.publish_function_app -and $tout.deploy_function_app) {$false} else {$true} +$skipDatabase = if($tout.publish_metadata_database -and $tout.deploy_metadata_database) {$false} else {$true} +$skipSQLLogins = if($tout.publish_sql_logins -and $tout.deploy_sql_server) {$false} else {$true} +$skipSynapseLogins = if($tout.publish_sql_logins -and $tout.deploy_synapse) {$false} else {$true} +$skipSampleFiles = if($tout.publish_sample_files){$false} else {$true} +$skipSIF= if($tout.publish_sif_database){$false} else {$true} +$skipNetworking = if($tout.configure_networking){$false} else {$true} +$skipDataFactoryPipelines = if($tout.publish_datafactory_pipelines) {$false} else {$true} +$skipFunctionalTests = if($tout.publish_functional_tests) {$false} else {$true} +$skipConfigurePurview = if($tout.publish_configure_purview) {$false} else {$true} +$AddCurrentUserAsWebAppAdmin = if($tout.publish_web_app_addcurrentuserasadmin) {$true} else {$false} +Set-Location $deploymentFolderPath #------------------------------------------------------------------------------------------------------------ @@ -108,7 +119,6 @@ $ipaddress2 = $env:TF_VAR_ip_address2 # SQL Deployment and Users # In order for a deployment agent service principal to execute the two scripts below you need to give directory read to the Azure SQL Instance Managed Identity and the Synapse Managed Identity #------------------------------------------------------------------------------------------------------------ -./Deploy_7_MetadataDB.ps1 -publish_metadata_database $PublishMetadataDatabase ./Deploy_8_SQLLogins.ps1 -PublishSQLLogins $PublishSQLLogins #------------------------------------------------------------------------------------------------------------ @@ -119,11 +129,12 @@ if($PerformPostIACPublishing -eq $false) { Write-Host "Skipping Post IAC 
Publishing" } else { - Invoke-Expression ./Deploy_4_PrivateLinks.ps1 - Invoke-Expression ./Deploy_5_WebApp.ps1 - Invoke-Expression ./Deploy_6_FuncApp.ps1 - Invoke-Expression ./Deploy_9_DataFactory.ps1 - Invoke-Expression ./Deploy_10_SampleFiles.ps1 + ./Deploy_4_PrivateLinks.ps1 + ./Deploy_5_WebApp.ps1 + ./Deploy_6_FuncApp.ps1 + ./Deploy_7_MetadataDB.ps1 -publish_metadata_database $PublishMetadataDatabase + ./Deploy_9_DataFactory.ps1 + ./Deploy_10_SampleFiles.ps1 } diff --git a/solution/DeploymentV2/Deploy_0_Prep.ps1 b/solution/DeploymentV2/Deploy_0_Prep.ps1 deleted file mode 100644 index 67b1a6d2..00000000 --- a/solution/DeploymentV2/Deploy_0_Prep.ps1 +++ /dev/null @@ -1,76 +0,0 @@ -param ( - [Parameter(Mandatory=$true)] - [System.Boolean]$gitDeploy=$false, - [Parameter(Mandatory=$true)] - [String]$deploymentFolderPath, - [Parameter(Mandatory=$true)] - [String]$FeatureTemplate -) - -#Check for SQLServer Module -$SqlInstalled = false -try { - $SqlInstalled = Get-InstalledModule SqlServer -} -catch { "SqlServer PowerShell module not installed." 
} - -if($null -eq $SqlInstalled) -{ - write-host "Installing SqlServer Module" - Install-Module -Name SqlServer -Scope CurrentUser -Force -} - -#needed for git integration -az extension add --upgrade --name datafactory - -#accept custom image terms -#https://docs.microsoft.com/en-us/cli/azure/vm/image/terms?view=azure-cli-latest - -#az vm image terms accept --urn h2o-ai:h2o-driverles-ai:h2o-dai-lts:latest - - - -if ($gitDeploy) -{ - $resourceGroupName = [System.Environment]::GetEnvironmentVariable('ARM_RESOURCE_GROUP_NAME') - $synapseWorkspaceName = [System.Environment]::GetEnvironmentVariable('ARM_RESOURCE_SYNAPSE_WORKSPACE_NAME') - $env:TF_VAR_ip_address = (Invoke-WebRequest ifconfig.me/ip).Content -} -else -{ - - #Only Prompt if Environment Variable has not been set - if ($null -eq [System.Environment]::GetEnvironmentVariable('environmentName')) - { - $envlist = (Get-ChildItem -Directory -Path ./environments/vars | Select-Object -Property Name).Name - Import-Module ./pwshmodules/GetSelectionFromUser.psm1 -Force - $environmentName = Get-SelectionFromUser -Options ($envlist) -Prompt "Select deployment environment" - [System.Environment]::SetEnvironmentVariable('environmentName', $environmentName) - } - - $env:TF_VAR_ip_address2 = (Invoke-WebRequest ifconfig.me/ip).Content - -} - - - -$environmentName = [System.Environment]::GetEnvironmentVariable('environmentName') - -if ($environmentName -eq "Quit" -or [string]::IsNullOrEmpty($environmentName)) -{ - write-host "environmentName is currently: $environmentName" - Write-Error "Environment is not set" - Exit -} - - -#Re-process Environment Config Files. 
-Set-Location ./environments/vars/ -./PreprocessEnvironment.ps1 -Environment $environmentName -FeatureTemplate $FeatureTemplate -gitDeploy $gitDeploy -Set-Location $deploymentFolderPath - -[System.Environment]::SetEnvironmentVariable('TFenvironmentName',$environmentName) - - - - diff --git a/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 b/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 index ec850304..b5028e5e 100644 --- a/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 +++ b/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 @@ -3,7 +3,7 @@ #------------------------------------------------------------------------------------------------------------ Set-Location "./terraform" Write-Host "Reading Terraform Outputs" -Import-Module .\..\GatherOutputsFromTerraform.psm1 -force +Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force $tout = GatherOutputsFromTerraform $outputs = terragrunt output -json --terragrunt-config ./vars/$env:environmentName/terragrunt.hcl | ConvertFrom-Json diff --git a/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 b/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 index ca2011c5..9cf70c84 100644 --- a/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 +++ b/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 @@ -20,6 +20,17 @@ else { $aadUsers += $purview_sp_name } + $sqladmins = ($env:TF_VAR_azure_sql_aad_administrators | ConvertFrom-Json -Depth 10) + $sqladmins2 = ($Sqladmins | Get-Member) | Where-Object {$_.MemberType -eq "NoteProperty"} | Select-Object -Property Name + foreach($user in $sqladmins2) + { + if($user.Name -ne "sql_aad_admin") + { + $aadUsers += $user.Name + } + } + + $token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) foreach($database in $databases) { @@ -29,12 +40,18 @@ else { if (![string]::IsNullOrEmpty($user)) { $sqlcommand = " - DROP USER IF EXISTS [$user] - CREATE USER [$user] FROM EXTERNAL PROVIDER; - ALTER ROLE db_datareader ADD MEMBER [$user]; - ALTER ROLE 
db_datawriter ADD MEMBER [$user]; - GRANT EXECUTE ON SCHEMA::[dbo] TO [$user]; - GO + + IF NOT EXISTS (SELECT * + FROM [sys].[database_principals] + WHERE [type] = N'E' AND [name] = N'$user') + BEGIN + CREATE USER [$user] FROM EXTERNAL PROVIDER; + END + ALTER ROLE db_datareader ADD MEMBER [$user]; + ALTER ROLE db_datawriter ADD MEMBER [$user]; + GRANT EXECUTE ON SCHEMA::[dbo] TO [$user]; + GO + " write-host "Granting MSI Privileges on $database DB to $user" @@ -92,7 +109,7 @@ else { $token=$(az account get-access-token --resource=https://sql.azuresynapse.net --query accessToken --output tsv) if ((![string]::IsNullOrEmpty($datafactory_name)) -and ($synapse_sql_pool_name -ne 'Dummy') -and (![string]::IsNullOrEmpty($synapse_sql_pool_name))) - { + { # For a Spark user to read and write directly from Spark into or from a SQL pool, db_owner permission is required. Invoke-Sqlcmd -ServerInstance "$synapse_workspace_name.sql.azuresynapse.net,1433" -Database $synapse_sql_pool_name -AccessToken $token -query "IF NOT EXISTS (SELECT name FROM [sys].[database_principals] diff --git a/solution/DeploymentV2/EnvironmentTemplate_admz.hcl b/solution/DeploymentV2/EnvironmentTemplate_admz.hcl deleted file mode 100644 index eb8331de..00000000 --- a/solution/DeploymentV2/EnvironmentTemplate_admz.hcl +++ /dev/null @@ -1,85 +0,0 @@ -remote_state { - backend = "azurerm" - generate = { - path = "backend.tf" - if_exists = "overwrite_terragrunt" - } - config = { - # You need to update the resource group and storage account here. - # You should have created these with the Prepare.ps1 script. 
- resource_group_name = "{resource_group_name}" - storage_account_name = "{storage_account_name}" - container_name = "tstate" - key = "terraform.tfstate" - } -} - -# These inputs are provided to the terraform variables when deploying the environment -# If you are deploying using pipelines, these can be overridden from environment variables -# using TF_VAR_variablename -inputs = { - prefix = "{prefix}" # All azure resources will be prefixed with this - domain = "{domain}" # Used when configuring AAD config for Azure functions - tenant_id = "{tenant_id}" # This is the Azure AD tenant ID - subscription_id = "{subscription_id}" # The azure subscription id to deploy to - resource_location = "Australia East" # The location of the resources - resource_group_name = "{resource_group_name}" # The resource group all resources will be deployed to - owner_tag = "Contoso" # Owner tag value for Azure resources - environment_tag = "stg" # This is used on Azure tags as well as all resource names - ip_address = "{ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. - configure_networking = false - is_vnet_isolated = true - publish_web_app = false - publish_function_app = false - publish_sample_files = true - publish_metadata_database = false - publish_datafactory_pipelines = true - publish_web_app_addcurrentuserasadmin = true - deploy_web_app = false - deploy_function_app = false - deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. 
- deploy_bastion = false - deploy_sentinel = false - deploy_purview = false - deploy_synapse = true - deploy_app_service_plan = false - deploy_synapse_sqlpool = false - deploy_selfhostedsql = false - deploy_metadata_database = false - deploy_h2o-ai = false - is_onprem_datafactory_ir_registered = false - publish_sif_database = {publish_sif_database} - - #Below is a space for providing details of EXISTING resources so that the deployment can integrate with your management zone. - #Please ensure that you enter everything that is relevant otherwise deployed resources may not work properly. - - #log anayltics resource id can be found under the properties of the log analytics resource NOTE: This is the full URI not the workspaceID - existing_log_analytics_resource_id = "LOG ANALYTICS RESOURCE ID" - existing_log_analytics_workspace_id = "LOG ANALYTICS WORKSPACE ID" - #synapse private link hub id can be found under the properties of the synapse private link NOTE: This is the full URI (ResourceID) - existing_synapse_private_link_hub_id = "SYNAPSE PRIVATE LINK HUB ID" - - #Please assign subnet id's for the following - you may end up using the same subnet id for all of these resources depending on your already deployed assets. - #command used to get subnet ids: - # az network vnet subnet show -g MyResourceGroup -n MySubnet --vnet-name MyVNet - existing_plink_subnet_id = "PRIVATE LINK SUBNET ID" - existing_bastion_subnet_id = "BASTION SUBNET ID" - existing_app_service_subnet_id = "APP SERVICE SUBNET ID" - existing_vm_subnet_id = "VM SUBNET ID" - - #assign the private DNS zone id's for the following. 
- #command used to get existing private-dns zones: - #az network private-dns zone list -g MyResourceGroup - existing_private_dns_zone_db_id = "DB PRIVATE DNS ZONE ID" - existing_private_dns_zone_kv_id = "KEYVAULT PRIVATE DNS ZONE ID" - existing_private_dns_zone_blob_id = "BLOB PRIVATE DNS ZONE ID" - existing_private_dns_zone_queue_id = "QUEUE PRIVATE DNS ZONE ID" - existing_private_dns_zone_dfs_id = "DFS PRIVATE DNS ZONE ID" - existing_private_dns_zone_purview_id = "PURVIEW PRIVATE DNS ZONE ID" - existing_private_dns_zone_purview_studio_id = "PURVIEW STUDIO PRIVATE DNS ZONE ID" - existing_private_dns_zone_servicebus_id = "SERVICEBUS PRIVATE DNS ZONE ID" - existing_private_dns_zone_synapse_gateway_id = "SYNAPSE GATEWAY PRIVATE DNS ZONE ID" - existing_private_dns_zone_synapse_studio_id = "SYNAPSE STUDIO PRIVATE DNS ZONE ID" - existing_private_dns_zone_synapse_sql_id = "SYNAPSE SQL PRIVATE DNS ZONE ID" - -} \ No newline at end of file diff --git a/solution/DeploymentV2/EnvironmentTemplate_local.hcl b/solution/DeploymentV2/EnvironmentTemplate_local.hcl deleted file mode 100644 index c827c8de..00000000 --- a/solution/DeploymentV2/EnvironmentTemplate_local.hcl +++ /dev/null @@ -1,31 +0,0 @@ -inputs = { - prefix = "{prefix}" # All azure resources will be prefixed with this - domain = "{domain}" # Used when configuring AAD config for Azure functions - tenant_id = "{tenant_id}" # This is the Azure AD tenant ID - subscription_id = "{subscription_id}" # The azure subscription id to deploy to - resource_location = "Australia East" # The location of the resources - resource_group_name = "{resource_group_name}" # The resource group all resources will be deployed to - owner_tag = "Contoso" # Owner tag value for Azure resources - environment_tag = "dev" # This is used on Azure tags as well as all resource names - ip_address = "{ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
- deploy_web_app = {deploy_web_app} - deploy_function_app = {deploy_function_app} - deploy_custom_terraform = {deploy_custom_terraform} # This is whether the infrastructure located in the terraform_custom folder is deployed or not. - deploy_app_service_plan = {deploy_app_service_plan} - deploy_data_factory = {deploy_data_factory} - deploy_sentinel = {deploy_sentinel} - deploy_purview = {deploy_purview} - deploy_synapse = {deploy_synapse} - deploy_metadata_database = {deploy_metadata_database} - is_vnet_isolated = {is_vnet_isolated} - publish_web_app = {publish_web_app} - publish_function_app = {publish_function_app} - publish_sample_files = {publish_sample_files} - publish_metadata_database = {publish_metadata_database} - configure_networking = {configure_networking} - publish_datafactory_pipelines = {publish_datafactory_pipelines} - publish_web_app_addcurrentuserasadmin = {publish_web_app_addcurrentuserasadmin} - deploy_selfhostedsql = {deploy_selfhostedsql} - is_onprem_datafactory_ir_registered = {is_onprem_datafactory_ir_registered} - publish_sif_database = {publish_sif_database} -} diff --git a/solution/DeploymentV2/EnvironmentTemplate_staging.hcl b/solution/DeploymentV2/EnvironmentTemplate_staging.hcl deleted file mode 100644 index 34252ed4..00000000 --- a/solution/DeploymentV2/EnvironmentTemplate_staging.hcl +++ /dev/null @@ -1,50 +0,0 @@ -remote_state { - backend = "azurerm" - generate = { - path = "backend.tf" - if_exists = "overwrite_terragrunt" - } - config = { - # You need to update the resource group and storage account here. - # You should have created these with the Prepare.ps1 script. 
- resource_group_name = "{resource_group_name}" - storage_account_name = "{storage_account_name}" - container_name = "tstate" - key = "terraform.tfstate" - } -} - -# These inputs are provided to the terraform variables when deploying the environment -# If you are deploying using pipelines, these can be overridden from environment variables -# using TF_VAR_variablename -inputs = { - prefix = "{prefix}" # All azure resources will be prefixed with this - domain = "{domain}" # Used when configuring AAD config for Azure functions - tenant_id = "{tenant_id}" # This is the Azure AD tenant ID - subscription_id = "{subscription_id}" # The azure subscription id to deploy to - resource_location = "Australia East" # The location of the resources - resource_group_name = "{resource_group_name}" # The resource group all resources will be deployed to - owner_tag = "Contoso" # Owner tag value for Azure resources - environment_tag = "stg" # This is used on Azure tags as well as all resource names - ip_address = "{ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. - deploy_web_app = {deploy_web_app} - deploy_function_app = {deploy_function_app} - deploy_custom_terraform = {deploy_custom_terraform} # This is whether the infrastructure located in the terraform_custom folder is deployed or not. 
- deploy_app_service_plan = {deploy_app_service_plan} - deploy_data_factory = {deploy_data_factory} - deploy_sentinel = {deploy_sentinel} - deploy_purview = {deploy_purview} - deploy_synapse = {deploy_synapse} - deploy_metadata_database = {deploy_metadata_database} - is_vnet_isolated = {is_vnet_isolated} - publish_web_app = {publish_web_app} - publish_function_app = {publish_function_app} - publish_sample_files = {publish_sample_files} - publish_metadata_database = {publish_metadata_database} - configure_networking = {configure_networking} - publish_datafactory_pipelines = {publish_datafactory_pipelines} - publish_web_app_addcurrentuserasadmin = {publish_web_app_addcurrentuserasadmin} - deploy_selfhostedsql = {deploy_selfhostedsql} - is_onprem_datafactory_ir_registered = {is_onprem_datafactory_ir_registered} - publish_sif_database = {publish_sif_database} -} \ No newline at end of file diff --git a/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 b/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 index b6427610..81ec3397 100644 --- a/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 +++ b/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 @@ -2,8 +2,8 @@ Set-Location $deploymentFolderPath Write-Host "Reading Terraform Outputs" Set-Location "./terraform_layer2" -Import-Module .\..\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform +Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force +$tout = GatherOutputsFromTerraform -TerraformFolderPath './' Set-Location $deploymentFolderPath Write-Host "Starting Adf Patterns" -ForegroundColor Yellow diff --git a/solution/DeploymentV2/RemoteInstallIntegrationRuntime.ps1 b/solution/DeploymentV2/RemoteInstallIntegrationRuntime.ps1 index 31a156aa..de52d5c2 100644 --- a/solution/DeploymentV2/RemoteInstallIntegrationRuntime.ps1 +++ b/solution/DeploymentV2/RemoteInstallIntegrationRuntime.ps1 @@ -1,12 +1,12 @@ $deploymentFolderPath = (Get-Location).Path -Set-Location 
"./terraform" +Set-Location "./terraform_layer2" #------------------------------------------------------------------------------------------------------------ # Get all the outputs from terraform so we can use them in subsequent steps #------------------------------------------------------------------------------------------------------------ Write-Host "Reading Terraform Outputs" -Import-Module .\..\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform +Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force +$tout = GatherOutputsFromTerraform -TerraformFolderPath './' $irKey1 = az datafactory integration-runtime list-auth-key --factory-name $tout.datafactory_name --name $tout.integration_runtimes[1].name --resource-group $tout.resource_group_name --query authKey1 --out tsv Write-Debug " irKey1 retrieved." diff --git a/solution/DeploymentV2/RemoteInstallSQLWithCDC.ps1 b/solution/DeploymentV2/RemoteInstallSQLWithCDC.ps1 index 7389dad5..d4b329b9 100644 --- a/solution/DeploymentV2/RemoteInstallSQLWithCDC.ps1 +++ b/solution/DeploymentV2/RemoteInstallSQLWithCDC.ps1 @@ -1,12 +1,12 @@ $deploymentFolderPath = (Get-Location).Path -Set-Location "./terraform" +Set-Location "./terraform_layer2" #------------------------------------------------------------------------------------------------------------ # Get all the outputs from terraform so we can use them in subsequent steps #------------------------------------------------------------------------------------------------------------ Write-Host "Reading Terraform Outputs" -Import-Module .\..\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform +Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force +$tout = GatherOutputsFromTerraform -TerraformFolderPath './' $ScriptUri = "https://gist.githubusercontent.com/jrampono/91076c406345c1d2487a82b1f106dfaa/raw/AW_EnableCDC.ps1" diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 
b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index f6b734a6..bdd597ac 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -60,8 +60,12 @@ foreach($t in ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "No if($Value.GetType().Name -eq "Boolean") { $Value = $Value.ToString().ToLower() - } - + } + if($Value.GetType().Name -eq "PSCustomObject") + { + $Value = ($Value | ConvertTo-Json -Depth 10) + } + if([string]::IsNullOrEmpty($Value) -eq $false -and $Value -ne '#####') { [Environment]::SetEnvironmentVariable($Name, $Value) diff --git a/solution/DeploymentV2/environments/vars/common_vars_schema.json b/solution/DeploymentV2/environments/vars/common_vars_schema.json new file mode 100644 index 00000000..5355ed38 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/common_vars_schema.json @@ -0,0 +1,261 @@ +{ + "type": "object", + "required": [], + "properties": { + "owner_tag": { + "type": "string", + "default": "Contoso", + "description": "Owner tag to be applied to resources.", + "examples": [ + "Contoso" + ] + }, + "resource_location": { + "type": "string", + "default": "australiaeast", + "description": "Azure region to be used for deployment.", + "enum": [ + "australiaeast", + "brazilsouth", + "eastasia", + "eastus", + "eastus2", + "northeurope", + "southcentralus", + "southeastasia", + "westcentralus", + "westeurope", + "westus", + "westus2" + ] + }, + "environment_tag": { + "type": "string", + "default": "dev", + "description": "Environment tag to be used in resource naming convention. 
Keep to 3 letters to prevent names becoming too long.", + "examples": [ + "dev" + ] + }, + "domain": { + "type": "string", + "default": "mydomain.com", + "description": "Azure Active Directory Domain to be used for authentication", + "examples": [ + "mydomain.com" + ] + }, + "subscription_id": { + "type": "string", + "default": "035ass64-f00d-48e2-b582-4fe1ss905ee3", + "description": "Id of the subscription into which the analytics landing zone will be deployed", + "examples": [ + "035a1ss4-f00d-48e2-b582-4fe1ss905ee3" + ] + }, + "prefix": { + "type": "string", + "default": "ads", + "description": "Prefix to be applied to all resource names. Keep to 3 letters to prevent names becoming too long.", + "examples": [ + "ads" + ] + }, + "resource_group_name": { + "type": "string", + "default": "gf1", + "description": "Name of the resource group into which the analytics landing zone will be deployed", + "examples": [ + "gf1" + ] + }, + "ip_address": { + "type": "string" + }, + "ip_address2": { + "type": "string" + }, + "tenant_id": { + "type": "string" + }, + "deployment_principal_layers1and3": { + "type": "string" + }, + "resource_owners": { + "type": "array", + "items": { + "type": "string" + } + }, + "synapse_administrators": { + "type": "object", + "required": [], + "properties": {}, + "default": {}, + "description": "Object with a property for each user that is to be a Synapse Administrator. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use.", + "examples": [ + { + "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + }] + }, + "azure_sql_aad_administrators": { + "type": "object", + "required": [], + "default": {"sql_aad_admin": ""}, + "description": "Object with a property for each user that is to be a Azure SQL Administrator. The property name should be the username of the user in AAD and should be unique. 
The property value is the object_id of the user. Leave as empty object when not is use. You MUST provide the sql_aad_admin property.", + "properties": { + "sql_aad_admin": { + "type": "string" + }, + "deploy_agent": { + "type": "string" + } + }, + "examples": [ + { + "sql_aad_admin": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "deploy_agent": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + } + ] + }, + "synapse_publishers": { + "type": "object", + "required": [], + "properties": {}, + "default": {}, + "description": "Object with a property for each user that is to be a Synapse Publisher. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use.", + "examples": [ + { + "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + } + ] + + }, + "synapse_contributors": { + "type": "object", + "required": [], + "properties": {}, + "default": {}, + "description": "Object with a property for each user that is to be a Synapse Contributor. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use.", + "examples": [ + { + "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + }] + }, + "WEB_APP_ADMIN_USER": { + "type": "string", + "default": "#####", + "description": "Object_Id of the user that you would like to have direct, explicit admin access to framework web front end.Set to '#####' if not in use. This setting over-rides the WEB_APP_ADMIN_SECURITY_GROUP. 
Use in scenarios where use of a security group is not possible.", + "examples": [ + "5c732d19-4076-4a76-87f3-6fbfd72f007d" + ] + }, + "WEB_APP_ADMIN_SECURITY_GROUP": { + "type": "string", + "default": "#####", + "description": "Name of the security group whos memebers will be given admin access to framework web front end.Set to '#####' if not in use.", + "examples": [ + "mysecuritygroup" + ] + }, + "ARM_SYNAPSE_WORKSPACE_NAME": { + "type": "string", + "default": "adsstgsynwadslwra", + "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates.", + "examples": [ + "adsstgsynwadslwra" + ] + }, + "ARM_KEYVAULT_NAME": { + "type": "string", + "default": "ads-stg-kv-ads-lwra", + "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates..", + "examples": [ + "ads-stg-kv-ads-lwra" + ] + }, + "ARM_DATALAKE_NAME": { + "type": "string", + "default": "adsstgdlsadslwraadsl", + "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates..", + "examples": [ + "adsstgdlsadslwraadsl" + ] + }, + "ARM_PAL_PARTNER_ID": { + "type": "string", + "default": "0", + "description": "ID of Implementation Partner for PAL purposes. Set to 0 if not in use", + "examples": [ + "0" + ] + }, + "GIT_REPOSITORY_NAME": { + "type": "string", + "default": "#####", + "description": "Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. 
Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "mytestrepo" + ] + }, + "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": { + "type": "string", + "default": "#####", + "description": "Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "main" + ] + }, + "GIT_USER_NAME": { + "type": "string", + "default": "#####", + "description": "Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test user" + ] + }, + "GIT_EMAIL_ADDRESS": { + "type": "string", + "default": "#####", + "description": "Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test@test.com" + ] + }, + "GIT_ADF_REPOSITORY_NAME": { + "type": "string", + "default": "#####", + "description": "Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file.Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "mytestrepo" + ] + }, + "GIT_ADF_REPOSITORY_BRANCH_NAME": { + "type": "string", + "default": "#####", + "description": "Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. 
Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "main" + ] + }, + "GIT_ADF_USER_NAME": { + "type": "string", + "default": "#####", + "description": "Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test user" + ] + }, + "GIT_ADF_EMAIL_ADDRESS": { + "type": "string", + "default": "#####", + "description": "Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test@test.com" + ] + } + } + } \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 28d3924b..fbddfecb 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -192,6 +192,13 @@ local AllVariables = [ "Value": locals[environment].synapse_administrators, "Sensitive": false }, + { + "CICDSecretName": "", + "EnvVarName": "TF_VAR_azure_sql_aad_administrators", + "HCLName": "azure_sql_aad_administrators", + "Value": locals[environment].azure_sql_aad_administrators, + "Sensitive": false + }, { "CICDSecretName": "", "EnvVarName": "", @@ -361,6 +368,32 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index c1dc1939..b54ad09a 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -1,7 +1,8 @@ { + "$schema": "./../common_vars_schema.json", //Core "owner_tag": "Contoso", - "resource_location": "AustraliaEast", + "resource_location": "australiaeast", "environment_tag": "stg", "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", @@ -14,7 +15,7 @@ //Owners & User Access "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_owners": [ - //Deployment Agent - Note that the first element here will become the SQL server AAD Admin + //Deployment Agent "4c732d19-4076-4a76-87f3-6fbfd77f007d", //Admin User - Jorampon "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" @@ -24,6 +25,11 @@ //"deploy_user": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d" }, + "azure_sql_aad_administrators": /*Note that you must designate a SQL AAD Admin*/{ + "sql_aad_admin": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "AdsGoFastDeployerjkcgkaibkungm": "4c732d19-4076-4a76-87f3-6fbfd77f007d" + }, "synapse_publishers": {}, "synapse_contributors": {}, "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", diff --git a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 new file mode 100644 index 00000000..e51ec65f --- /dev/null +++ b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 @@ -0,0 +1,79 @@ +function PrepareDeployment ( + [Parameter(Mandatory=$true)] + [System.Boolean]$gitDeploy=$false, + [Parameter(Mandatory=$true)] + 
[String]$deploymentFolderPath, + [Parameter(Mandatory=$true)] + [String]$FeatureTemplate +) +{ + Set-Location $deploymentFolderPath + + #Check for SQLServer Module + $SqlInstalled = false + try { + $SqlInstalled = Get-InstalledModule SqlServer + } + catch { "SqlServer PowerShell module not installed." } + + if($null -eq $SqlInstalled) + { + write-host "Installing SqlServer Module" + Install-Module -Name SqlServer -Scope CurrentUser -Force + } + + #needed for git integration + az extension add --upgrade --name datafactory + + #accept custom image terms + #https://docs.microsoft.com/en-us/cli/azure/vm/image/terms?view=azure-cli-latest + + #az vm image terms accept --urn h2o-ai:h2o-driverles-ai:h2o-dai-lts:latest + + + + if ($gitDeploy) + { + $resourceGroupName = [System.Environment]::GetEnvironmentVariable('ARM_RESOURCE_GROUP_NAME') + $synapseWorkspaceName = [System.Environment]::GetEnvironmentVariable('ARM_RESOURCE_SYNAPSE_WORKSPACE_NAME') + $env:TF_VAR_ip_address = (Invoke-WebRequest ifconfig.me/ip).Content + } + else + { + + #Only Prompt if Environment Variable has not been set + if ($null -eq [System.Environment]::GetEnvironmentVariable('environmentName')) + { + $envlist = (Get-ChildItem -Directory -Path ./environments/vars | Select-Object -Property Name).Name + Import-Module ./pwshmodules/GetSelectionFromUser.psm1 -Force + $environmentName = Get-SelectionFromUser -Options ($envlist) -Prompt "Select deployment environment" + [System.Environment]::SetEnvironmentVariable('environmentName', $environmentName) + } + + $env:TF_VAR_ip_address2 = (Invoke-WebRequest ifconfig.me/ip).Content + + } + + + + $environmentName = [System.Environment]::GetEnvironmentVariable('environmentName') + + if ($environmentName -eq "Quit" -or [string]::IsNullOrEmpty($environmentName)) + { + write-host "environmentName is currently: $environmentName" + Write-Error "Environment is not set" + Exit + } + + + #Re-process Environment Config Files. 
+ Set-Location ./environments/vars/ + ./PreprocessEnvironment.ps1 -Environment $environmentName -FeatureTemplate $FeatureTemplate -gitDeploy $gitDeploy + Set-Location $deploymentFolderPath + + [System.Environment]::SetEnvironmentVariable('TFenvironmentName',$environmentName) + + + + +} \ No newline at end of file diff --git a/solution/DeploymentV2/GatherOutputsFromTerraform.psm1 b/solution/DeploymentV2/pwshmodules/GatherOutputsFromTerraform.psm1 similarity index 89% rename from solution/DeploymentV2/GatherOutputsFromTerraform.psm1 rename to solution/DeploymentV2/pwshmodules/GatherOutputsFromTerraform.psm1 index 36517695..6063517d 100644 --- a/solution/DeploymentV2/GatherOutputsFromTerraform.psm1 +++ b/solution/DeploymentV2/pwshmodules/GatherOutputsFromTerraform.psm1 @@ -1,5 +1,8 @@ -function GatherOutputsFromTerraform() +function GatherOutputsFromTerraform($TerraformFolderPath) { + + $currentPath = (Get-Location).Path + Set-Location $TerraformFolderPath $environmentName = $env:TFenvironmentName #$environmentName = "local" # currently supports (local, staging) $myIp = (Invoke-WebRequest ifconfig.me/ip).Content @@ -26,5 +29,6 @@ function GatherOutputsFromTerraform() #Set-Location $CurrentFolderPath Write-Host "Reading Terraform Outputs - Finished" Write-Host "-------------------------------------------------------------------------------------------------" + Set-Location $currentPath return $tout } \ No newline at end of file diff --git a/solution/DeploymentV2/pwshmodules/Test.psm1 b/solution/DeploymentV2/pwshmodules/Test.psm1 new file mode 100644 index 00000000..d3d8c21b --- /dev/null +++ b/solution/DeploymentV2/pwshmodules/Test.psm1 @@ -0,0 +1,4 @@ +function Test { + +return gci +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl b/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl index 252e7b4d..a120aa10 100644 --- a/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl +++ 
b/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl @@ -41,6 +41,25 @@ provider "registry.terraform.io/hashicorp/azurerm" { ] } +provider "registry.terraform.io/hashicorp/null" { + version = "3.1.1" + hashes = [ + "h1:71sNUDvmiJcijsvfXpiLCz0lXIBSsEJjMxljt7hxMhw=", + "zh:063466f41f1d9fd0dd93722840c1314f046d8760b1812fa67c34de0afcba5597", + "zh:08c058e367de6debdad35fc24d97131c7cf75103baec8279aba3506a08b53faf", + "zh:73ce6dff935150d6ddc6ac4a10071e02647d10175c173cfe5dca81f3d13d8afe", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:8fdd792a626413502e68c195f2097352bdc6a0df694f7df350ed784741eb587e", + "zh:976bbaf268cb497400fd5b3c774d218f3933271864345f18deebe4dcbfcd6afa", + "zh:b21b78ca581f98f4cdb7a366b03ae9db23a73dfa7df12c533d7c19b68e9e72e5", + "zh:b7fc0c1615dbdb1d6fd4abb9c7dc7da286631f7ca2299fb9cd4664258ccfbff4", + "zh:d1efc942b2c44345e0c29bc976594cb7278c38cfb8897b344669eafbc3cddf46", + "zh:e356c245b3cd9d4789bab010893566acace682d7db877e52d40fc4ca34a50924", + "zh:ea98802ba92fcfa8cf12cbce2e9e7ebe999afbf8ed47fa45fc847a098d89468b", + "zh:eff8872458806499889f6927b5d954560f3d74bf20b6043409edf94d26cd906f", + ] +} + provider "registry.terraform.io/hashicorp/random" { version = "3.3.0" constraints = ">= 2.2.0, 3.3.0" diff --git a/solution/DeploymentV2/terraform_layer2/database.ps1 b/solution/DeploymentV2/terraform_layer2/database.ps1 new file mode 100644 index 00000000..14c8c5e8 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/database.ps1 @@ -0,0 +1,28 @@ +param ( + [Parameter(Mandatory=$true)] + [string]$user=$false, + [Parameter(Mandatory=$true)] + [string]$sqlserver_name=$false, + [Parameter(Mandatory=$true)] + [string]$database=$false +) + +$token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) + +$sqlcommand = " + +IF NOT EXISTS (SELECT * +FROM [sys].[database_principals] +WHERE [type] = N'E' AND [name] = N'$user') +BEGIN + CREATE USER [$user] FROM EXTERNAL PROVIDER; 
+END +ALTER ROLE db_datareader ADD MEMBER [$user]; +ALTER ROLE db_datawriter ADD MEMBER [$user]; +GRANT EXECUTE ON SCHEMA::[dbo] TO [$user]; +GO + +" + +write-host "Granting MSI Privileges on $database DB to $user" +Invoke-Sqlcmd -ServerInstance "$sqlserver_name.database.windows.net,1433" -Database $database -AccessToken $token -query $sqlcommand \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/database.tf b/solution/DeploymentV2/terraform_layer2/database.tf index 2e6d7dc4..4c38c464 100644 --- a/solution/DeploymentV2/terraform_layer2/database.tf +++ b/solution/DeploymentV2/terraform_layer2/database.tf @@ -26,7 +26,7 @@ resource "azurerm_mssql_server" "sqlserver" { azuread_administrator { login_username = "sqladmin" - object_id = var.resource_owners[0] + object_id = var.azure_sql_aad_administrators["sql_aad_admin"] } identity { type = "SystemAssigned" @@ -109,3 +109,12 @@ resource "azurerm_private_endpoint" "db_private_endpoint_with_dns" { ] } } + +/* resource "null_resource" "metadatadb_admins" { + for_each = (var.azure_sql_aad_administrators) + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${each.key} -sqlserver_name ${local.sql_server_name} -database ${local.metadata_database_name}" + } + +} */ diff --git a/solution/DeploymentV2/terraform_layer2/outputs.tf b/solution/DeploymentV2/terraform_layer2/outputs.tf index 8d478033..5115e679 100644 --- a/solution/DeploymentV2/terraform_layer2/outputs.tf +++ b/solution/DeploymentV2/terraform_layer2/outputs.tf @@ -141,6 +141,9 @@ output "publish_datafactory_pipelines" { output "publish_web_app_addcurrentuserasadmin" { value = var.publish_web_app_addcurrentuserasadmin } +output "azure_sql_aad_administrators" { + value = var.azure_sql_aad_administrators +} output "synapse_workspace_name" { value = var.deploy_synapse ? 
azurerm_synapse_workspace.synapse[0].name : "" } diff --git a/solution/DeploymentV2/terraform_layer2/vars.tf b/solution/DeploymentV2/terraform_layer2/vars.tf index 7e424492..a6d80b39 100644 --- a/solution/DeploymentV2/terraform_layer2/vars.tf +++ b/solution/DeploymentV2/terraform_layer2/vars.tf @@ -844,6 +844,12 @@ variable "deployment_principal_layers1and3" { type = string } +variable "azure_sql_aad_administrators" { + description = "List of Azure SQL Administrators" + type = map(string) + default = {} +} + variable "synapse_administrators" { description = "List of Synapse Administrators" type = map(string) diff --git a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl index cb5d834c..61c047e2 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/local/terragrunt.hcl @@ -51,6 +51,7 @@ inputs = { owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
+ azure_sql_aad_administrators = "${local.common_vars.azure_sql_aad_administrators}" synapse_administrators = "${local.common_vars.synapse_administrators}" resource_owners = "${local.common_vars.resource_owners}" deploy_web_app = true diff --git a/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl index 532e8e17..06bfa6ee 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/production/terragrunt.hcl @@ -50,7 +50,8 @@ inputs = { resource_group_name = "${local.common_vars.resource_group_name}" # The resource group all resources will be deployed to owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names - ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. 
Used to create firewall exemptions + azure_sql_aad_administrators = "${local.common_vars.azure_sql_aad_administrators}" synapse_administrators = "${local.common_vars.synapse_administrators}" resource_owners = "${local.common_vars.resource_owners}" deploy_web_app = true diff --git a/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl index 5fc9eb64..049ed36e 100644 --- a/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer2/vars/staging/terragrunt.hcl @@ -51,8 +51,9 @@ inputs = { owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + azure_sql_aad_administrators = "${local.common_vars.azure_sql_aad_administrators}" synapse_administrators = "${local.common_vars.synapse_administrators}" - resource_owners = "${local.common_vars.resource_owners}" + resource_owners = "${local.common_vars.resource_owners}" deploy_web_app = true deploy_function_app = true deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. 
From e87587ceafe0805801d16542c6d686adbc16ddc1 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 29 Jul 2022 15:28:51 +0800 Subject: [PATCH 063/151] Fixed Path issue with Import-Module --- solution/DeploymentV2/Cleanup_RemoveAll.ps1 | 4 ++- .../GenerateAndUploadADFPipelines.ps1 | 3 +- .../RemoteInstallIntegrationRuntime.ps1 | 3 +- .../DeploymentV2/RemoteInstallSQLWithCDC.ps1 | 3 +- .../terraform_layer3/database.ps1 | 33 +++++++++++++++++++ .../DeploymentV2/terraform_layer3/database.tf | 8 +++++ 6 files changed, 50 insertions(+), 4 deletions(-) create mode 100644 solution/DeploymentV2/terraform_layer3/database.ps1 create mode 100644 solution/DeploymentV2/terraform_layer3/database.tf diff --git a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 index 3df8decf..7f6a3f46 100644 --- a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 +++ b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 @@ -4,6 +4,8 @@ #---------------------------------------------------------------------------------------------------------------- Import-Module .\pwshmodules\GetSelectionFromUser.psm1 -force +Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force + $environmentName = Get-SelectionFromUser -Options ('local','staging') -Prompt "Select deployment environment" if ($environmentName -eq "Quit") { @@ -17,7 +19,7 @@ Set-Location ".\terraform" # Get all the outputs from terraform so we can use them in subsequent steps #------------------------------------------------------------------------------------------------------------ Write-Host "Reading Terraform Outputs" -Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force + $tout = GatherOutputsFromTerraform -TerraformFolderPath ./terraform_layer2 #Delete Resource Group diff --git a/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 b/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 index 81ec3397..fd3c200d 100644 --- a/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 +++ 
b/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 @@ -1,8 +1,9 @@ Set-Location $deploymentFolderPath +Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force Write-Host "Reading Terraform Outputs" Set-Location "./terraform_layer2" -Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force + $tout = GatherOutputsFromTerraform -TerraformFolderPath './' Set-Location $deploymentFolderPath diff --git a/solution/DeploymentV2/RemoteInstallIntegrationRuntime.ps1 b/solution/DeploymentV2/RemoteInstallIntegrationRuntime.ps1 index de52d5c2..1324a8d6 100644 --- a/solution/DeploymentV2/RemoteInstallIntegrationRuntime.ps1 +++ b/solution/DeploymentV2/RemoteInstallIntegrationRuntime.ps1 @@ -1,11 +1,12 @@ $deploymentFolderPath = (Get-Location).Path +Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force Set-Location "./terraform_layer2" #------------------------------------------------------------------------------------------------------------ # Get all the outputs from terraform so we can use them in subsequent steps #------------------------------------------------------------------------------------------------------------ Write-Host "Reading Terraform Outputs" -Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force + $tout = GatherOutputsFromTerraform -TerraformFolderPath './' $irKey1 = az datafactory integration-runtime list-auth-key --factory-name $tout.datafactory_name --name $tout.integration_runtimes[1].name --resource-group $tout.resource_group_name --query authKey1 --out tsv diff --git a/solution/DeploymentV2/RemoteInstallSQLWithCDC.ps1 b/solution/DeploymentV2/RemoteInstallSQLWithCDC.ps1 index d4b329b9..d77a2298 100644 --- a/solution/DeploymentV2/RemoteInstallSQLWithCDC.ps1 +++ b/solution/DeploymentV2/RemoteInstallSQLWithCDC.ps1 @@ -1,11 +1,12 @@ $deploymentFolderPath = (Get-Location).Path +Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force Set-Location "./terraform_layer2" 
#------------------------------------------------------------------------------------------------------------ # Get all the outputs from terraform so we can use them in subsequent steps #------------------------------------------------------------------------------------------------------------ Write-Host "Reading Terraform Outputs" -Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force + $tout = GatherOutputsFromTerraform -TerraformFolderPath './' $ScriptUri = "https://gist.githubusercontent.com/jrampono/91076c406345c1d2487a82b1f106dfaa/raw/AW_EnableCDC.ps1" diff --git a/solution/DeploymentV2/terraform_layer3/database.ps1 b/solution/DeploymentV2/terraform_layer3/database.ps1 new file mode 100644 index 00000000..bae09b7f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/database.ps1 @@ -0,0 +1,33 @@ +param ( + [Parameter(Mandatory=$true)] + [string]$user=$false, + [Parameter(Mandatory=$true)] + [string]$sqlserver_name=$false, + [Parameter(Mandatory=$true)] + [string]$database=$false +) + +$token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) + +$sqlcommand = " +IF '$user' = 'sql_aad_admin' +BEGIN + Exit +END + + +IF NOT EXISTS (SELECT * +FROM [sys].[database_principals] +WHERE [type] = N'E' AND [name] = N'$user') +BEGIN + CREATE USER [$user] FROM EXTERNAL PROVIDER; +END +ALTER ROLE db_datareader ADD MEMBER [$user]; +ALTER ROLE db_datawriter ADD MEMBER [$user]; +GRANT EXECUTE ON SCHEMA::[dbo] TO [$user]; +GO + +" + +write-host "Granting MSI Privileges on $database DB to $user" +Invoke-Sqlcmd -ServerInstance "$sqlserver_name.database.windows.net,1433" -Database $database -AccessToken $token -query $sqlcommand \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer3/database.tf b/solution/DeploymentV2/terraform_layer3/database.tf new file mode 100644 index 00000000..fcf2a96f --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/database.tf @@ -0,0 +1,8 @@ 
+resource "null_resource" "metadatadb_admins" { + for_each = (var.azure_sql_aad_administrators) + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${each.key} -sqlserver_name ${local.sql_server_name} -database ${local.metadata_database_name}" + } + +} From e8f7ec7520205c2b9ed5d40570a5bfa0e0e56732 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 29 Jul 2022 17:00:29 +0800 Subject: [PATCH 064/151] Added SQL User Permission Scripts to Layer3 Terraform --- .../DeploymentV2/Deploy_11_AdAppRoles.ps1 | 43 --------- .../vars/common_vars_template.jsonnet | 18 ++++ .../terraform_layer2/.terraform.lock.hcl | 19 ---- .../terraform_layer3/.terraform.lock.hcl | 19 ++++ .../terraform_layer3/app_service.ps1 | 22 +++++ .../terraform_layer3/app_service.tf | 9 ++ .../terraform_layer3/database.ps1 | 12 +-- .../DeploymentV2/terraform_layer3/database.tf | 87 ++++++++++++++++++- .../DeploymentV2/terraform_layer3/synapse.tf | 9 ++ .../DeploymentV2/terraform_layer3/vars.tf | 17 ++++ 10 files changed, 187 insertions(+), 68 deletions(-) delete mode 100644 solution/DeploymentV2/Deploy_11_AdAppRoles.ps1 create mode 100644 solution/DeploymentV2/terraform_layer3/app_service.ps1 create mode 100644 solution/DeploymentV2/terraform_layer3/app_service.tf create mode 100644 solution/DeploymentV2/terraform_layer3/synapse.tf diff --git a/solution/DeploymentV2/Deploy_11_AdAppRoles.ps1 b/solution/DeploymentV2/Deploy_11_AdAppRoles.ps1 deleted file mode 100644 index 751f0fcd..00000000 --- a/solution/DeploymentV2/Deploy_11_AdAppRoles.ps1 +++ /dev/null @@ -1,43 +0,0 @@ - - - -#---------------------------------------------------------------------------------------------------------------- -# Web App Admin User -#---------------------------------------------------------------------------------------------------------------- - -#---------------------------------------------------------------------------------------------------------------- -if ($gitDeploy 
-or $null -eq (az ad signed-in-user show) ) -{ - if ($null -ne [System.Environment]::GetEnvironmentVariable('WEB_APP_ADMIN_USER') -and [System.Environment]::GetEnvironmentVariable('WEB_APP_ADMIN_USER') -ne "") { - write-host "Adding Admin Role To WebApp for specific user" - $authapp = (az ad app show --id $tout.aad_webreg_id) | ConvertFrom-Json - $authappid = $authapp.appId - $authappobjectid = (az ad sp show --id $authapp.appId | ConvertFrom-Json).id - $body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json - $body.resourceId = $authappobjectid - $body.appRoleId = ($authapp.appRoles | Where-Object {$_.value -eq "Administrator" }).id - $body.principalId = [System.Environment]::GetEnvironmentVariable('WEB_APP_ADMIN_USER') - $body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') - - $result = az rest --method post --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body - } -} -else -{ - if ($AddCurrentUserAsWebAppAdmin) { - write-host "Adding Admin Role To WebApp" - $authapp = (az ad app show --id $tout.aad_webreg_id) | ConvertFrom-Json - $cu = az ad signed-in-user show | ConvertFrom-Json - $callinguser = $cu.id - $authappid = $authapp.appId - $authappobjectid = (az ad sp show --id $authapp.appId | ConvertFrom-Json).id - - $body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json - $body.resourceId = $authappobjectid - $body.appRoleId = ($authapp.appRoles | Where-Object {$_.value -eq "Administrator" }).id - $body.principalId = $callinguser - $body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') - - $result = az rest --method post --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body - } -} \ No newline at end of 
file diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index fbddfecb..2adc8985 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -385,6 +385,24 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl b/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl index a120aa10..252e7b4d 100644 --- a/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl +++ b/solution/DeploymentV2/terraform_layer2/.terraform.lock.hcl @@ -41,25 +41,6 @@ provider "registry.terraform.io/hashicorp/azurerm" { ] } -provider "registry.terraform.io/hashicorp/null" { - version = "3.1.1" - hashes = [ - "h1:71sNUDvmiJcijsvfXpiLCz0lXIBSsEJjMxljt7hxMhw=", - "zh:063466f41f1d9fd0dd93722840c1314f046d8760b1812fa67c34de0afcba5597", - "zh:08c058e367de6debdad35fc24d97131c7cf75103baec8279aba3506a08b53faf", - "zh:73ce6dff935150d6ddc6ac4a10071e02647d10175c173cfe5dca81f3d13d8afe", - "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", - "zh:8fdd792a626413502e68c195f2097352bdc6a0df694f7df350ed784741eb587e", - "zh:976bbaf268cb497400fd5b3c774d218f3933271864345f18deebe4dcbfcd6afa", - "zh:b21b78ca581f98f4cdb7a366b03ae9db23a73dfa7df12c533d7c19b68e9e72e5", - "zh:b7fc0c1615dbdb1d6fd4abb9c7dc7da286631f7ca2299fb9cd4664258ccfbff4", - "zh:d1efc942b2c44345e0c29bc976594cb7278c38cfb8897b344669eafbc3cddf46", - "zh:e356c245b3cd9d4789bab010893566acace682d7db877e52d40fc4ca34a50924", - "zh:ea98802ba92fcfa8cf12cbce2e9e7ebe999afbf8ed47fa45fc847a098d89468b", - "zh:eff8872458806499889f6927b5d954560f3d74bf20b6043409edf94d26cd906f", - ] -} - provider "registry.terraform.io/hashicorp/random" { version = "3.3.0" constraints = ">= 2.2.0, 3.3.0" diff --git 
a/solution/DeploymentV2/terraform_layer3/.terraform.lock.hcl b/solution/DeploymentV2/terraform_layer3/.terraform.lock.hcl index 8ca5626b..6d027bc6 100644 --- a/solution/DeploymentV2/terraform_layer3/.terraform.lock.hcl +++ b/solution/DeploymentV2/terraform_layer3/.terraform.lock.hcl @@ -41,6 +41,25 @@ provider "registry.terraform.io/hashicorp/azurerm" { ] } +provider "registry.terraform.io/hashicorp/null" { + version = "3.1.1" + hashes = [ + "h1:71sNUDvmiJcijsvfXpiLCz0lXIBSsEJjMxljt7hxMhw=", + "zh:063466f41f1d9fd0dd93722840c1314f046d8760b1812fa67c34de0afcba5597", + "zh:08c058e367de6debdad35fc24d97131c7cf75103baec8279aba3506a08b53faf", + "zh:73ce6dff935150d6ddc6ac4a10071e02647d10175c173cfe5dca81f3d13d8afe", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:8fdd792a626413502e68c195f2097352bdc6a0df694f7df350ed784741eb587e", + "zh:976bbaf268cb497400fd5b3c774d218f3933271864345f18deebe4dcbfcd6afa", + "zh:b21b78ca581f98f4cdb7a366b03ae9db23a73dfa7df12c533d7c19b68e9e72e5", + "zh:b7fc0c1615dbdb1d6fd4abb9c7dc7da286631f7ca2299fb9cd4664258ccfbff4", + "zh:d1efc942b2c44345e0c29bc976594cb7278c38cfb8897b344669eafbc3cddf46", + "zh:e356c245b3cd9d4789bab010893566acace682d7db877e52d40fc4ca34a50924", + "zh:ea98802ba92fcfa8cf12cbce2e9e7ebe999afbf8ed47fa45fc847a098d89468b", + "zh:eff8872458806499889f6927b5d954560f3d74bf20b6043409edf94d26cd906f", + ] +} + provider "registry.terraform.io/hashicorp/random" { version = "3.3.0" constraints = ">= 2.2.0, 3.3.0" diff --git a/solution/DeploymentV2/terraform_layer3/app_service.ps1 b/solution/DeploymentV2/terraform_layer3/app_service.ps1 new file mode 100644 index 00000000..08ba2096 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/app_service.ps1 @@ -0,0 +1,22 @@ +#---------------------------------------------------------------------------------------------------------------- +# Web App Admin User +#---------------------------------------------------------------------------------------------------------------- 
+param ( + [Parameter(Mandatory=$true)] + [string]$aad_webreg_id="" +) +#---------------------------------------------------------------------------------------------------------------- + +write-host "Adding Admin Role To WebApp for specific user" +$authapp = (az ad app show --id $aad_webreg_id) | ConvertFrom-Json +$authappid = $authapp.appId +$authappobjectid = (az ad sp show --id $authapp.appId | ConvertFrom-Json).id +$body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json +$body.resourceId = $authappobjectid +$body.appRoleId = ($authapp.appRoles | Where-Object {$_.value -eq "Administrator" }).id +$body.principalId = [System.Environment]::GetEnvironmentVariable('WEB_APP_ADMIN_USER') +$body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') + +$result = az rest --method post --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body + + diff --git a/solution/DeploymentV2/terraform_layer3/app_service.tf b/solution/DeploymentV2/terraform_layer3/app_service.tf new file mode 100644 index 00000000..538af90b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/app_service.tf @@ -0,0 +1,9 @@ + +resource "null_resource" "webapp_admins" { + #for_each = (var.azure_sql_aad_administrators) + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file app_service.ps1 -aad_webreg_id ${data.terraform_remote_state.layer2.outputs.aad_webreg_id}" + } + +} diff --git a/solution/DeploymentV2/terraform_layer3/database.ps1 b/solution/DeploymentV2/terraform_layer3/database.ps1 index bae09b7f..bcc3bd75 100644 --- a/solution/DeploymentV2/terraform_layer3/database.ps1 +++ b/solution/DeploymentV2/terraform_layer3/database.ps1 @@ -1,10 +1,10 @@ param ( [Parameter(Mandatory=$true)] - [string]$user=$false, + [string]$user="", [Parameter(Mandatory=$true)] - [string]$sqlserver_name=$false, + 
[string]$sqlserver_name="", [Parameter(Mandatory=$true)] - [string]$database=$false + [string]$database="" ) $token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) @@ -12,7 +12,7 @@ $token=$(az account get-access-token --resource=https://database.windows.net --q $sqlcommand = " IF '$user' = 'sql_aad_admin' BEGIN - Exit + GOTO ExitLabel END @@ -25,8 +25,10 @@ END ALTER ROLE db_datareader ADD MEMBER [$user]; ALTER ROLE db_datawriter ADD MEMBER [$user]; GRANT EXECUTE ON SCHEMA::[dbo] TO [$user]; + + +ExitLabel: GO - " write-host "Granting MSI Privileges on $database DB to $user" diff --git a/solution/DeploymentV2/terraform_layer3/database.tf b/solution/DeploymentV2/terraform_layer3/database.tf index fcf2a96f..3d31eb19 100644 --- a/solution/DeploymentV2/terraform_layer3/database.tf +++ b/solution/DeploymentV2/terraform_layer3/database.tf @@ -2,7 +2,92 @@ resource "null_resource" "metadatadb_admins" { for_each = (var.azure_sql_aad_administrators) provisioner "local-exec" { working_dir = path.module - command = "pwsh -file database.ps1 -user ${each.key} -sqlserver_name ${local.sql_server_name} -database ${local.metadata_database_name}" + command = "pwsh -file database.ps1 -user ${each.key} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.metadatadb_name}" } } + +resource "null_resource" "stagingdb_admins" { + for_each = (var.azure_sql_aad_administrators) + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${each.key} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" + } + +} + +resource "null_resource" "sampledb_admins" { + for_each = (var.azure_sql_aad_administrators) + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${each.key} 
-sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" + } + +} + +//Puview +resource "null_resource" "purview_access_sampledb" { + count = var.deploy_purview ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" + } + +} + +resource "null_resource" "purview_access_stagingdb" { + count = var.deploy_purview ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" + } + +} + +//Puview SP +resource "null_resource" "purview_sp_access_sampledb" { + count = var.deploy_purview ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_sp_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" + } + +} + +resource "null_resource" "purview_sp_access_stagingdb" { + count = var.deploy_purview ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_sp_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" + } + +} + +//Data Factory +resource "null_resource" "datafactory_access_sampledb" { + count = var.deploy_data_factory ? 
1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.datafactory_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" + } + +} + +resource "null_resource" "datafactory_access_stagingdb" { + count = var.deploy_data_factory ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.datafactory_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" + } + +} + +resource "null_resource" "datafactory_access_metadatadb" { + count = var.deploy_data_factory ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.datafactory_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.metadatadb_name}" + } + +} + diff --git a/solution/DeploymentV2/terraform_layer3/synapse.tf b/solution/DeploymentV2/terraform_layer3/synapse.tf new file mode 100644 index 00000000..b0cab934 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/synapse.tf @@ -0,0 +1,9 @@ + +/* resource "null_resource" "synapsedb_admins" { + for_each = (var.azure_sql_aad_administrators) + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${each.key} -sqlserver_name ${local.sql_server_name} -database ${local.sample_database_name}" + } + +} */ diff --git a/solution/DeploymentV2/terraform_layer3/vars.tf b/solution/DeploymentV2/terraform_layer3/vars.tf index 446bd5e8..48f696e7 100644 --- a/solution/DeploymentV2/terraform_layer3/vars.tf +++ b/solution/DeploymentV2/terraform_layer3/vars.tf @@ -94,6 +94,11 @@ 
variable "aad_functionapp_name" { # Feature Toggles #--------------------------------------------------------------- +variable "deploy_data_factory" { + description = "Feature toggle for deploying the Azure Data Factory" + default = true + type = bool +} variable "deploy_web_app" { description = "Feature toggle for deploying the Web App" @@ -131,6 +136,18 @@ variable "is_vnet_isolated" { type = bool } + + +#--------------------------------------------------------------- +# User Access and Ownership/ +#--------------------------------------------------------------- + +variable "azure_sql_aad_administrators" { + description = "List of Azure SQL Administrators" + type = map(string) + default = {} +} + variable "resource_owners" { description = "A web app Azure security group used for admin access." default = { From 581e535c70773386eda440b23dec1fea99aa9c11 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 29 Jul 2022 17:46:38 +0800 Subject: [PATCH 065/151] Fixed Path Issue with clean up script --- solution/DeploymentV2/Cleanup_RemoveAll.ps1 | 2 -- solution/DeploymentV2/Prepare.ps1 | 2 -- .../vars/common_vars_template.jsonnet | 4 ++++ .../vars/staging/common_vars_values.jsonc | 23 ++++--------------- .../DeploymentV2/terraform_layer2/layer1.tf | 4 ++-- .../DeploymentV2/terraform_layer3/layer2.tf | 4 ++-- 6 files changed, 12 insertions(+), 27 deletions(-) diff --git a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 index 7f6a3f46..2330fbc4 100644 --- a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 +++ b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 @@ -13,8 +13,6 @@ if ($environmentName -eq "Quit") } [System.Environment]::SetEnvironmentVariable('TFenvironmentName',$environmentName) -Set-Location ".\terraform" - #------------------------------------------------------------------------------------------------------------ # Get all the outputs from terraform so we can use them in subsequent steps 
#------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index 8129d356..c81fbde8 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -77,8 +77,6 @@ if ($gitDeploy) } else { - $environmentFile = "./EnvironmentTemplate_" + $environmentName + ".hcl" - $environmentFileContents = Get-Content $environmentFile $env:TF_VAR_resource_group_name = Read-Host "Enter the name of the resource group to create (enter to skip)" $env:TF_VAR_storage_account_name = $env:TF_VAR_resource_group_name+"state" $CONTAINER_NAME="tstate" diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 2adc8985..eff3c736 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -417,6 +417,10 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index b54ad09a..b78e4364 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -1,49 +1,36 @@ { "$schema": "./../common_vars_schema.json", - //Core "owner_tag": "Contoso", "resource_location": "australiaeast", "environment_tag": "stg", "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", - "resource_group_name": "gft4", + "resource_group_name": "adf1", "ip_address": "144.138.148.220", "ip_address2": "144.138.148.220", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", - - //Owners & User Access "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_owners": [ - //Deployment Agent "4c732d19-4076-4a76-87f3-6fbfd77f007d", - //Admin User - Jorampon "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" - ], "synapse_administrators": { - //"deploy_user": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d" }, - "azure_sql_aad_administrators": /*Note that you must designate a SQL AAD Admin*/{ + "azure_sql_aad_administrators": { "sql_aad_admin": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "AdsGoFastDeployerjkcgkaibkungm": "4c732d19-4076-4a76-87f3-6fbfd77f007d" + "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "AdsGoFastDeployerjkcgkaibkungm": "4c732d19-4076-4a76-87f3-6fbfd77f007d" }, "synapse_publishers": {}, "synapse_contributors": {}, "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "WEB_APP_ADMIN_SECURITY_GROUP": "#####", - - //Post Layer 1 Reources. Deployment of Terraform Layer One will give you these values. 
They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates. "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadslwra", "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-lwra", "ARM_DATALAKE_NAME": "adsstgdlsadslwraadsl", - - //PAL "ARM_PAL_PARTNER_ID": "0", - - //GIT "GIT_REPOSITORY_NAME": "#####", "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "#####", "GIT_USER_NAME": "#####", @@ -52,6 +39,4 @@ "GIT_ADF_REPOSITORY_BRANCH_NAME": "#####", "GIT_ADF_USER_NAME": "#####", "GIT_ADF_EMAIL_ADDRESS": "#####" - - } diff --git a/solution/DeploymentV2/terraform_layer2/layer1.tf b/solution/DeploymentV2/terraform_layer2/layer1.tf index 7cb68c42..49ce5b16 100644 --- a/solution/DeploymentV2/terraform_layer2/layer1.tf +++ b/solution/DeploymentV2/terraform_layer2/layer1.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer1.tfstate" - resource_group_name = "gft4" - storage_account_name = "gft4state" + resource_group_name = "adf1" + storage_account_name = "adf1state" } } diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index beccd25e..8a4d6cb9 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer2.tfstate" - resource_group_name = "gft4" - storage_account_name = "gft4state" + resource_group_name = "adf1" + storage_account_name = "adf1state" } } From 5f9fd882f250a58a2a7f32674b65ed4a5035ed21 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 29 Jul 2022 23:32:33 +1000 Subject: [PATCH 066/151] Update common_vars_values.jsonc --- .../environments/vars/staging/common_vars_values.jsonc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index b78e4364..45c40efc 100644 --- 
a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -19,7 +19,7 @@ "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d" }, "azure_sql_aad_administrators": { - "sql_aad_admin": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "sql_aad_admin": "4c732d19-4076-4a76-87f3-6fbfd77f007d", "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "AdsGoFastDeployerjkcgkaibkungm": "4c732d19-4076-4a76-87f3-6fbfd77f007d" }, From 9d57ebff79ff6a504955f8d524fb45591d9b2e26 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 30 Jul 2022 13:04:20 +0800 Subject: [PATCH 067/151] Fixed Issue with SQL Users still being added in layer 2 instead of layer3 --- .github/workflows/02.continuous-delivery.yml | 23 ++- .../1-0-0/A-Journaled/008-CreateMSILogins.sql | 4 +- solution/DeploymentV2/Cleanup_RemoveAll.ps1 | 3 + .../DeploymentV2/Deploy_10_SampleFiles.ps1 | 37 ----- solution/DeploymentV2/Deploy_5_WebApp.ps1 | 25 --- solution/DeploymentV2/Deploy_6_FuncApp.ps1 | 29 ---- solution/DeploymentV2/Deploy_7_MetadataDB.ps1 | 85 ---------- .../DeploymentV2/Deploy_9_DataFactory.ps1 | 26 ---- .../GenerateAndUploadADFPipelines.ps1 | 36 ----- .../vars/common_vars_template.jsonnet | 66 ++++++++ .../pwshmodules/Deploy_0_Prep.psm1 | 14 +- .../pwshmodules/Deploy_10_SampleFiles.psm1 | 51 ++++++ .../pwshmodules/Deploy_5_WebApp.psm1 | 47 ++++++ .../pwshmodules/Deploy_6_FuncApp.psm1 | 48 ++++++ .../pwshmodules/Deploy_7_MetadataDB.psm1 | 73 +++++++++ .../pwshmodules/Deploy_9_DataFactory.psm1 | 51 ++++++ .../GenerateAndUploadADFPipelines.psm1 | 46 ++++++ .../terraform_layer1/01-deploy.ps1 | 47 ++++++ .../terraform_layer2/02-deploy.ps1 | 47 ++++++ .../terraform_layer2/02-publish.ps1 | 64 ++++++++ .../terraform_layer3/.terraform.lock.hcl | 19 --- .../terraform_layer3/03-deploy.ps1 | 47 ++++++ .../{app_service.tf => app_service.tf.bak} | 0 .../terraform_layer3/database.ps1 | 146 ++++++++++++++---- 
.../DeploymentV2/terraform_layer3/database.tf | 93 ----------- .../terraform_layer3/database.tf.bak | 137 ++++++++++++++++ .../terraform_layer3/redo_sql_users.ps1 | 11 ++ .../Patterns/UploadTaskTypeMappings.ps1 | 2 +- 28 files changed, 888 insertions(+), 389 deletions(-) delete mode 100644 solution/DeploymentV2/Deploy_10_SampleFiles.ps1 delete mode 100644 solution/DeploymentV2/Deploy_5_WebApp.ps1 delete mode 100644 solution/DeploymentV2/Deploy_6_FuncApp.ps1 delete mode 100644 solution/DeploymentV2/Deploy_7_MetadataDB.ps1 delete mode 100644 solution/DeploymentV2/Deploy_9_DataFactory.ps1 delete mode 100644 solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 create mode 100644 solution/DeploymentV2/pwshmodules/Deploy_10_SampleFiles.psm1 create mode 100644 solution/DeploymentV2/pwshmodules/Deploy_5_WebApp.psm1 create mode 100644 solution/DeploymentV2/pwshmodules/Deploy_6_FuncApp.psm1 create mode 100644 solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 create mode 100644 solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 create mode 100644 solution/DeploymentV2/pwshmodules/GenerateAndUploadADFPipelines.psm1 create mode 100644 solution/DeploymentV2/terraform_layer1/01-deploy.ps1 create mode 100644 solution/DeploymentV2/terraform_layer2/02-deploy.ps1 create mode 100644 solution/DeploymentV2/terraform_layer2/02-publish.ps1 create mode 100644 solution/DeploymentV2/terraform_layer3/03-deploy.ps1 rename solution/DeploymentV2/terraform_layer3/{app_service.tf => app_service.tf.bak} (100%) delete mode 100644 solution/DeploymentV2/terraform_layer3/database.tf create mode 100644 solution/DeploymentV2/terraform_layer3/database.tf.bak create mode 100644 solution/DeploymentV2/terraform_layer3/redo_sql_users.ps1 diff --git a/.github/workflows/02.continuous-delivery.yml b/.github/workflows/02.continuous-delivery.yml index 42205eab..ee2f7ee3 100644 --- a/.github/workflows/02.continuous-delivery.yml +++ b/.github/workflows/02.continuous-delivery.yml @@ -90,7 +90,7 
@@ jobs: - name: Terragrunt Install id: terragrunt_install - working-directory: ./solution/DeploymentV2/terraform + working-directory: ./solution/DeploymentV2/terraform/terraform_layer2 run: | brew install terragrunt @@ -103,16 +103,25 @@ jobs: wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb - - name: Deploy Solution - id: solution-deployment - working-directory: ./solution/DeploymentV2/ + - name: Deploy Solution IAC + id: solution-deployment-iac + working-directory: ./solution/DeploymentV2/terraform_layer2 shell: pwsh env: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | - git update-index --chmod=+x ./Deploy.ps1 - ./Deploy.ps1 -RunTerraformLayer2 $true -FeatureTemplate ${{ env.featureTemplate}} -PerformPostIACPublishing $true - + git update-index --chmod=+x ./02_deploy.ps1 + ./02_deploy.ps1 + + - name: Build and Publish Code Artefacts + id: solution-deployment-code + working-directory: ./solution/DeploymentV2/terraform_layer2 + shell: pwsh + env: + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + run: | + git update-index --chmod=+x ./02_publish.ps1 + ./02_publish.ps1 #PROD ENVIRONMENT deploy-to-env-two: diff --git a/solution/Database/ADSGoFastDbUp/AdsGoFastDbUp/1-0-0/A-Journaled/008-CreateMSILogins.sql b/solution/Database/ADSGoFastDbUp/AdsGoFastDbUp/1-0-0/A-Journaled/008-CreateMSILogins.sql index c55c86cb..6d968489 100644 --- a/solution/Database/ADSGoFastDbUp/AdsGoFastDbUp/1-0-0/A-Journaled/008-CreateMSILogins.sql +++ b/solution/Database/ADSGoFastDbUp/AdsGoFastDbUp/1-0-0/A-Journaled/008-CreateMSILogins.sql @@ -1,4 +1,4 @@ - DROP USER IF EXISTS [$FunctionAppName$] + /* DROP USER IF EXISTS [$FunctionAppName$] CREATE USER [$FunctionAppName$] FROM EXTERNAL PROVIDER; ALTER ROLE db_datareader ADD MEMBER [$FunctionAppName$]; ALTER ROLE db_datawriter ADD MEMBER [$FunctionAppName$]; @@ -18,4 +18,4 @@ ALTER ROLE db_datareader ADD MEMBER [$DataFactoryName$]; ALTER ROLE 
db_datawriter ADD MEMBER [$DataFactoryName$]; GRANT EXECUTE ON SCHEMA::[dbo] TO [$DataFactoryName$]; - GO \ No newline at end of file + GO */ \ No newline at end of file diff --git a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 index 2330fbc4..ab4ace1a 100644 --- a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 +++ b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 @@ -40,6 +40,9 @@ $apps = (az ad sp list --show-mine | ConvertFrom-Json | Where-Object {$_.display foreach($app in $apps) {az ad sp delete --id $app} +$apps = (az ad app list --show-mine | ConvertFrom-Json | Where-Object {$_.displayName.StartsWith("ADS GoFast")}).id +foreach($app in $apps) {az ad app delete --id $app} + #> \ No newline at end of file diff --git a/solution/DeploymentV2/Deploy_10_SampleFiles.ps1 b/solution/DeploymentV2/Deploy_10_SampleFiles.ps1 deleted file mode 100644 index e9053086..00000000 --- a/solution/DeploymentV2/Deploy_10_SampleFiles.ps1 +++ /dev/null @@ -1,37 +0,0 @@ - - - -#---------------------------------------------------------------------------------------------------------------- -# Deploy Sample Files -#---------------------------------------------------------------------------------------------------------------- - -#---------------------------------------------------------------------------------------------------------------- -if($skipSampleFiles) { - Write-Host "Skipping Sample Files" -} -else -{ - Set-Location $deploymentFolderPath - Set-Location "../SampleFiles/" - Write-Host "Deploying Sample files" - if ($tout.is_vnet_isolated -eq $true) - { - $result = az storage account update --resource-group $resource_group_name --name $adlsstorage_name --default-action Allow - } - - $result = az storage container create --name "datalakelanding" --account-name $adlsstorage_name --auth-mode login - $result = az storage container create --name "datalakeraw" --account-name $adlsstorage_name --auth-mode login - $result = az storage container 
create --name "datalakeraw" --account-name $blobstorage_name --auth-mode login - $result = az storage container create --name "transientin" --account-name $blobstorage_name --auth-mode login - - $result = az storage blob upload-batch --overwrite --destination "datalakeraw" --account-name $adlsstorage_name --source ./ --destination-path samples/ --auth-mode login - $result = az storage blob upload-batch --overwrite --destination "datalakeraw" --account-name $blobstorage_name --source ./ --destination-path samples/ --auth-mode login - - if ($tout.is_vnet_isolated -eq $true) - { - $result = az storage account update --resource-group $resource_group_name --name $adlsstorage_name --default-action Deny - } - - Set-Location $deploymentFolderPath - -} \ No newline at end of file diff --git a/solution/DeploymentV2/Deploy_5_WebApp.ps1 b/solution/DeploymentV2/Deploy_5_WebApp.ps1 deleted file mode 100644 index 2f87788c..00000000 --- a/solution/DeploymentV2/Deploy_5_WebApp.ps1 +++ /dev/null @@ -1,25 +0,0 @@ -#---------------------------------------------------------------------------------------------------------------- -# Building & Deploy Web App -#---------------------------------------------------------------------------------------------------------------- -if ($skipWebApp) { - Write-Host "Skipping Building & Deploying Web Application" -} -else { - Write-Host "Building & Deploying Web Application" - #Move From Workflows to Function App - Set-Location $deploymentFolderPath - Set-Location "../WebApplication" - dotnet restore - dotnet publish --no-restore --configuration Release --output '..\DeploymentV2\bin\publish\unzipped\webapplication\' - #Move back to workflows - Set-Location $deploymentFolderPath - Set-Location "./bin/publish" - $Path = (Get-Location).Path + "/zipped/webapplication" - New-Item -ItemType Directory -Force -Path $Path - $Path = $Path + "/Publish.zip" - Compress-Archive -Path '.\unzipped\webapplication\*' -DestinationPath $Path -force - - $result = az 
webapp deployment source config-zip --resource-group $resource_group_name --name $webapp_name --src $Path - - Set-Location $deploymentFolderPath -} \ No newline at end of file diff --git a/solution/DeploymentV2/Deploy_6_FuncApp.ps1 b/solution/DeploymentV2/Deploy_6_FuncApp.ps1 deleted file mode 100644 index 31c32183..00000000 --- a/solution/DeploymentV2/Deploy_6_FuncApp.ps1 +++ /dev/null @@ -1,29 +0,0 @@ - -#---------------------------------------------------------------------------------------------------------------- -# Building & Deploy Function App -#---------------------------------------------------------------------------------------------------------------- -if ($skipFunctionApp) { - Write-Host "Skipping Building & Deploying Function Application" -} -else { - Write-Host "Building & Deploying Function Application" - Set-Location $deploymentFolderPath - Set-Location "..\FunctionApp\FunctionApp" - dotnet restore - dotnet publish --no-restore --configuration Release --output '..\..\DeploymentV2\bin\publish\unzipped\functionapp\' - - Set-Location $deploymentFolderPath - Set-Location "./bin/publish" - $Path = (Get-Location).Path + "/zipped/functionapp" - New-Item -ItemType Directory -Force -Path $Path - $Path = $Path + "/Publish.zip" - Compress-Archive -Path '.\unzipped\functionapp\*' -DestinationPath $Path -force - - $result = az functionapp deployment source config-zip --resource-group $resource_group_name --name $functionapp_name --src $Path - - #Make sure we are running V6.0 --TODO: Move this to terraform if possible -- This is now done! 
- $result = az functionapp config set --net-framework-version v6.0 -n $functionapp_name -g $resource_group_name - $result = az functionapp config appsettings set --name $functionapp_name --resource-group $resource_group_name --settings FUNCTIONS_EXTENSION_VERSION=~4 - - Set-Location $deploymentFolderPath -} diff --git a/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 b/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 deleted file mode 100644 index a1960985..00000000 --- a/solution/DeploymentV2/Deploy_7_MetadataDB.ps1 +++ /dev/null @@ -1,85 +0,0 @@ -param ( - [Parameter(Mandatory=$false)] - [bool]$publish_metadata_database=$false -) -#---------------------------------------------------------------------------------------------------------------- -# Populate the Metadata Database -#---------------------------------------------------------------------------------------------------------------- -if($publish_metadata_database -eq $false) { - Write-Host "Skipping Populating Metadata Database" -} -else { - - Write-Host "Populating Metadata Database" - - Set-Location $deploymentFolderPath - Set-Location "..\Database\ADSGoFastDbUp\AdsGoFastDbUp" - dotnet restore - dotnet publish --no-restore --configuration Release --output '..\..\..\DeploymentV2\bin\publish\unzipped\database\' - - #Add Ip to SQL Firewall - $result = az sql server update -n $sqlserver_name -g $resource_group_name --set publicNetworkAccess="Enabled" - - $myIp = $env:TF_VAR_ip_address - $myIp2 = $env:TF_VAR_ip_address2 - - if($myIp -ne $null) - { - $result = az sql server firewall-rule create -g $resource_group_name -s $sqlserver_name -n "DeploymentAgent" --start-ip-address $myIp --end-ip-address $myIp - } - if($myIp2 -ne $null) - { - $result = az sql server firewall-rule create -g $resource_group_name -s $sqlserver_name -n "DeploymentUser" --start-ip-address $myIp2 --end-ip-address $myIp2 - } - #Allow Azure services and resources to access this server - $result = az sql server firewall-rule create -g 
$resource_group_name -s $sqlserver_name -n "Azure" --start-ip-address 0.0.0.0 --end-ip-address 0.0.0.0 - - Set-Location $deploymentFolderPath - Set-Location ".\bin\publish\unzipped\database\" - - $lake_database_container_name = $tout.synapse_lakedatabase_container_name - - # This has been updated to use the Azure CLI cred - dotnet AdsGoFastDbUp.dll -a True -c "Data Source=tcp:${sqlserver_name}.database.windows.net;Initial Catalog=${metadatadb_name};" -v True --DataFactoryName $datafactory_name --ResourceGroupName $resource_group_name --KeyVaultName $keyvault_name --LogAnalyticsWorkspaceId $loganalyticsworkspace_id --SubscriptionId $subscription_id --SampleDatabaseName $sampledb_name --StagingDatabaseName $stagingdb_name --MetadataDatabaseName $metadatadb_name --BlobStorageName $blobstorage_name --AdlsStorageName $adlsstorage_name --WebAppName $webapp_name --FunctionAppName $functionapp_name --SqlServerName $sqlserver_name --SynapseWorkspaceName $synapse_workspace_name --SynapseDatabaseName $synapse_sql_pool_name --SynapseSQLPoolName $synapse_sql_pool_name --SynapseSparkPoolName $synapse_spark_pool_name --PurviewAccountName $purview_name --SynapseLakeDatabaseContainerName $lake_database_container_name - - # Fix the MSI registrations on the other databases. I'd like a better way of doing this in the future - $SqlInstalled = false - try { - $SqlInstalled = Get-InstalledModule SqlServer - } - catch { "SqlServer PowerShell module not installed." 
} - - if($null -eq $SqlInstalled) - { - write-host "Installing SqlServer Module" - Install-Module -Name SqlServer -Scope CurrentUser -Force - } - - $databases = @($stagingdb_name, $sampledb_name, $metadatadb_name) - #SIFDatabase - if (!$skipSIF){ - <# $databases = @($stagingdb_name, $sampledb_name, $sifdb_name ,$metadatadb_name) - Set-Location $deploymentFolderPath - Set-Location "..\Database\ADSGoFastDbUp\SIF" - dotnet restore - dotnet publish --no-restore --configuration Release --output '..\..\..\DeploymentV2\bin\publish\unzipped\database\' - - Set-Location $deploymentFolderPath - Set-Location ".\bin\publish\unzipped\database\" - - $synapse_sql_serverless_name = "${synapse_workspace_name}-ondemand.sql.azuresynapse.net" - $AdlsStorageurl = "https://${adlsstorage_name}.blob.core.windows.net/datalakelanding" - - - dotnet SIF.dll -a True -c "Data Source=tcp:$synapse_sql_serverless_name;Initial Catalog=master;" -v True --DataFactoryName $datafactory_name --ResourceGroupName $resource_group_name ` - --KeyVaultName $keyvault_name --LogAnalyticsWorkspaceId $loganalyticsworkspace_id --SubscriptionId $subscription_id --WebAppName $webapp_name ` - --FunctionAppName $functionapp_name --SqlServerName $sqlserver_name --SynapseWorkspaceName $synapse_workspace_name --SynapseSQLPoolName $synapse_sql_pool_name ` - --SynapseDatabaseName $sifdb_name --SIFDatabaseName $sifdb_name --RelativePath $RelativePath --AdlsStorageName $AdlsStorageurl #> - - } else { - $databases = @($stagingdb_name, $sampledb_name ,$metadatadb_name) - } - - Set-Location $deploymentFolderPath - -} \ No newline at end of file diff --git a/solution/DeploymentV2/Deploy_9_DataFactory.ps1 b/solution/DeploymentV2/Deploy_9_DataFactory.ps1 deleted file mode 100644 index 1885ec30..00000000 --- a/solution/DeploymentV2/Deploy_9_DataFactory.ps1 +++ /dev/null @@ -1,26 +0,0 @@ -#---------------------------------------------------------------------------------------------------------------- -# Deploy Data Factory Pipelines 
-#---------------------------------------------------------------------------------------------------------------- -if ($skipDataFactoryPipelines) { - Write-Host "Skipping DataFactory Pipelines" -} -else { - Set-Location $deploymentFolderPath - #Add Ip to SQL Firewall - $result = az sql server update -n $sqlserver_name -g $resource_group_name --set publicNetworkAccess="Enabled" - $result = az sql server firewall-rule create -g $resource_group_name -s $sqlserver_name -n "Deploy.ps1" --start-ip-address $myIp --end-ip-address $myIp - #Allow Azure services and resources to access this server - $result = az sql server firewall-rule create -g $resource_group_name -s $sqlserver_name -n "Azure" --start-ip-address 0.0.0.0 --end-ip-address 0.0.0.0 - - $SqlInstalled = Get-InstalledModule SqlServer - if($null -eq $SqlInstalled) - { - write-host "Installing SqlServer Module" - Install-Module -Name SqlServer -Scope CurrentUser -Force - } - - Invoke-Expression ./GenerateAndUploadADFPipelines.ps1 - Set-Location $deploymentFolderPath - - -} \ No newline at end of file diff --git a/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 b/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 deleted file mode 100644 index fd3c200d..00000000 --- a/solution/DeploymentV2/GenerateAndUploadADFPipelines.ps1 +++ /dev/null @@ -1,36 +0,0 @@ -Set-Location $deploymentFolderPath -Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force - -Write-Host "Reading Terraform Outputs" -Set-Location "./terraform_layer2" - -$tout = GatherOutputsFromTerraform -TerraformFolderPath './' -Set-Location $deploymentFolderPath - -Write-Host "Starting Adf Patterns" -ForegroundColor Yellow -Set-Location ../DataFactory/Patterns/ -Invoke-Expression ./Jsonnet_GenerateADFArtefacts.ps1 - -if ($tout.adf_git_toggle_integration) { - Invoke-Expression ./UploadGeneratedPatternsToGit.ps1 -} -else { - Invoke-Expression ./UploadGeneratedPatternsToADF.ps1 -} -Invoke-Expression ./UploadTaskTypeMappings.ps1 -#Below is 
temporary - we want to make a parent folder for the both of these directories in the future. -#Currently there are duplicate powershell scripts. Plan is to iterate through each subfolder (datafactory / synapse) with one script -Write-Host "Starting Synapse Parts" -ForegroundColor Yellow -Set-Location ../../Synapse/Patterns/ -Invoke-Expression ./Jsonnet_GenerateADFArtefacts.ps1 -if ($tout.synapse_git_toggle_integration) { - Invoke-Expression ./UploadGeneratedPatternsToGit.ps1 -} -else { - Invoke-Expression ./UploadGeneratedPatternsToADF.ps1 - Invoke-Expression ./uploadNotebooks.ps1 -} -Invoke-Expression ./UploadTaskTypeMappings.ps1 - - -Set-Location $deploymentFolderPath \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index eff3c736..7487493b 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -406,6 +406,72 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 index e51ec65f..46b1c6a5 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 @@ -4,7 +4,9 @@ function PrepareDeployment ( [Parameter(Mandatory=$true)] [String]$deploymentFolderPath, [Parameter(Mandatory=$true)] - [String]$FeatureTemplate + [String]$FeatureTemplate, + [Parameter(Mandatory=$false)] + [String]$PathToReturnTo="" ) { Set-Location $deploymentFolderPath @@ -73,7 +75,13 @@ function PrepareDeployment ( [System.Environment]::SetEnvironmentVariable('TFenvironmentName',$environmentName) - - + if([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) + { + Write-Debug "Returning to $PathToReturnTo" + Set-Location $PathToReturnTo + } + else { + Write-Debug "Path to return to is null" + } } \ No newline at end of file diff --git a/solution/DeploymentV2/pwshmodules/Deploy_10_SampleFiles.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_10_SampleFiles.psm1 new file mode 100644 index 00000000..b9e34ebe --- /dev/null +++ b/solution/DeploymentV2/pwshmodules/Deploy_10_SampleFiles.psm1 @@ -0,0 +1,51 @@ + + +function DeploySampleFiles ( + [Parameter(Mandatory = $true)] + [pscustomobject]$tout = $false, + [Parameter(Mandatory = $true)] + [string]$deploymentFolderPath = "", + [Parameter(Mandatory = $true)] + [String]$PathToReturnTo = "" +) { + #---------------------------------------------------------------------------------------------------------------- + # Deploy Sample Files + #---------------------------------------------------------------------------------------------------------------- + + #---------------------------------------------------------------------------------------------------------------- + $skipSampleFiles = if 
($tout.publish_sample_files) { $false } else { $true } + if ($skipSampleFiles) { + Write-Host "Skipping Sample Files" + } + else { + Set-Location $deploymentFolderPath + Set-Location "../SampleFiles/" + Write-Host "Deploying Sample files" + if ($tout.is_vnet_isolated -eq $true) { + $result = az storage account update --resource-group $tout.resource_group_name --name $tout.adlsstorage_name --default-action Allow + } + + $result = az storage container create --name "datalakelanding" --account-name $tout.adlsstorage_name --auth-mode login + $result = az storage container create --name "datalakeraw" --account-name $tout.adlsstorage_name --auth-mode login + $result = az storage container create --name "datalakeraw" --account-name $tout.blobstorage_name --auth-mode login + $result = az storage container create --name "transientin" --account-name $tout.blobstorage_name --auth-mode login + + $result = az storage blob upload-batch --overwrite --destination "datalakeraw" --account-name $tout.adlsstorage_name --source ./ --destination-path samples/ --auth-mode login + $result = az storage blob upload-batch --overwrite --destination "datalakeraw" --account-name $tout.blobstorage_name --source ./ --destination-path samples/ --auth-mode login + + if ($tout.is_vnet_isolated -eq $true) { + $result = az storage account update --resource-group $tout.resource_group_name --name $tout.adlsstorage_name --default-action Deny + } + + Set-Location $deploymentFolderPath + + if ([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) { + Write-Debug "Returning to $PathToReturnTo" + Set-Location $PathToReturnTo + } + else { + Write-Debug "Path to return to is null" + } + + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/pwshmodules/Deploy_5_WebApp.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_5_WebApp.psm1 new file mode 100644 index 00000000..10e122e4 --- /dev/null +++ b/solution/DeploymentV2/pwshmodules/Deploy_5_WebApp.psm1 @@ -0,0 +1,47 @@ +function DeployWebApp ( + 
[Parameter(Mandatory=$true)] + [pscustomobject]$tout=$false, + [Parameter(Mandatory=$true)] + [string]$deploymentFolderPath="", + [Parameter(Mandatory=$true)] + [String]$PathToReturnTo="" +) +{ + #---------------------------------------------------------------------------------------------------------------- + # Building & Deploy Web App + #---------------------------------------------------------------------------------------------------------------- + $skipWebApp = if($tout.publish_web_app -and $tout.deploy_web_app) {$false} else {$true} + if ($skipWebApp) { + Write-Host "Skipping Building & Deploying Web Application" + } + else { + Write-Host "Building & Deploying Web Application" + #Move From Workflows to Function App + Set-Location $deploymentFolderPath + Set-Location "../WebApplication" + dotnet restore + dotnet publish --no-restore --configuration Release --output '..\DeploymentV2\bin\publish\unzipped\webapplication\' + #Move back to workflows + Set-Location $deploymentFolderPath + Set-Location "./bin/publish" + $Path = (Get-Location).Path + "/zipped/webapplication" + New-Item -ItemType Directory -Force -Path $Path + $Path = $Path + "/Publish.zip" + Compress-Archive -Path '.\unzipped\webapplication\*' -DestinationPath $Path -force + + $result = az webapp deployment source config-zip --resource-group $tout.resource_group_name --name $tout.webapp_name --src $Path + + Set-Location $deploymentFolderPath + + if([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) + { + Write-Debug "Returning to $PathToReturnTo" + Set-Location $PathToReturnTo + } + else { + Write-Debug "Path to return to is null" + } + } + + +} \ No newline at end of file diff --git a/solution/DeploymentV2/pwshmodules/Deploy_6_FuncApp.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_6_FuncApp.psm1 new file mode 100644 index 00000000..e9714e8f --- /dev/null +++ b/solution/DeploymentV2/pwshmodules/Deploy_6_FuncApp.psm1 @@ -0,0 +1,48 @@ +function DeployFuncApp ( + [Parameter(Mandatory=$true)] + 
[pscustomobject]$tout=$false, + [Parameter(Mandatory=$true)] + [string]$deploymentFolderPath="", + [Parameter(Mandatory=$true)] + [String]$PathToReturnTo="" +) +{ + #---------------------------------------------------------------------------------------------------------------- + # Building & Deploy Function App + #---------------------------------------------------------------------------------------------------------------- + $skipFunctionApp = if($tout.publish_function_app -and $tout.deploy_function_app) {$false} else {$true} + if ($skipFunctionApp) { + Write-Host "Skipping Building & Deploying Function Application" + } + else { + Write-Host "Building & Deploying Function Application" + Set-Location $deploymentFolderPath + Set-Location "..\FunctionApp\FunctionApp" + dotnet restore + dotnet publish --no-restore --configuration Release --output '..\..\DeploymentV2\bin\publish\unzipped\functionapp\' + + Set-Location $deploymentFolderPath + Set-Location "./bin/publish" + $Path = (Get-Location).Path + "/zipped/functionapp" + New-Item -ItemType Directory -Force -Path $Path + $Path = $Path + "/Publish.zip" + Compress-Archive -Path '.\unzipped\functionapp\*' -DestinationPath $Path -force + + $result = az functionapp deployment source config-zip --resource-group $tout.resource_group_name --name $tout.functionapp_name --src $Path + + #Make sure we are running V6.0 --TODO: Move this to terraform if possible -- This is now done! 
+ $result = az functionapp config set --net-framework-version v6.0 -n $tout.functionapp_name -g $tout.resource_group_name + $result = az functionapp config appsettings set --name $tout.functionapp_name --resource-group $tout.resource_group_name --settings FUNCTIONS_EXTENSION_VERSION=~4 + + Set-Location $deploymentFolderPath + + if([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) + { + Write-Debug "Returning to $PathToReturnTo" + Set-Location $PathToReturnTo + } + else { + Write-Debug "Path to return to is null" + } + } +} diff --git a/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 new file mode 100644 index 00000000..6082c1df --- /dev/null +++ b/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 @@ -0,0 +1,73 @@ +function DeployMataDataDB ( + [Parameter(Mandatory = $false)] + [bool]$publish_metadata_database = $false, + [Parameter(Mandatory = $true)] + [pscustomobject]$tout = $false, + [Parameter(Mandatory = $true)] + [string]$deploymentFolderPath = "", + [Parameter(Mandatory = $true)] + [String]$PathToReturnTo = "" +) +{ + #---------------------------------------------------------------------------------------------------------------- + # Populate the Metadata Database + #---------------------------------------------------------------------------------------------------------------- + if ($publish_metadata_database -eq $false) { + Write-Host "Skipping Populating Metadata Database" + } + else { + + Write-Host "Populating Metadata Database" + + Set-Location $deploymentFolderPath + Set-Location "..\Database\ADSGoFastDbUp\AdsGoFastDbUp" + dotnet restore + dotnet publish --no-restore --configuration Release --output '..\..\..\DeploymentV2\bin\publish\unzipped\database\' + + #Add Ip to SQL Firewall + $result = az sql server update -n $tout.sqlserver_name -g $tout.resource_group_name --set publicNetworkAccess="Enabled" + + $myIp = $env:TF_VAR_ip_address + $myIp2 = $env:TF_VAR_ip_address2 
+ + if ($myIp -ne $null) { + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "DeploymentAgent" --start-ip-address $myIp --end-ip-address $myIp + } + if ($myIp2 -ne $null) { + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "DeploymentUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + } + #Allow Azure services and resources to access this server + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "Azure" --start-ip-address 0.0.0.0 --end-ip-address 0.0.0.0 + + Set-Location $deploymentFolderPath + Set-Location ".\bin\publish\unzipped\database\" + + $lake_database_container_name = $tout.synapse_lakedatabase_container_name + + # This has been updated to use the Azure CLI cred + dotnet AdsGoFastDbUp.dll -a True -c "Data Source=tcp:$($tout.sqlserver_name).database.windows.net;Initial Catalog=$($tout.metadatadb_name);" -v True --DataFactoryName $tout.datafactory_name --ResourceGroupName $tout.resource_group_name --KeyVaultName $tout.keyvault_name --LogAnalyticsWorkspaceId $tout.loganalyticsworkspace_id --SubscriptionId $tout.subscription_id --SampleDatabaseName $tout.sampledb_name --StagingDatabaseName $tout.stagingdb_name --MetadataDatabaseName $tout.metadatadb_name --BlobStorageName $tout.blobstorage_name --AdlsStorageName $tout.adlsstorage_name --WebAppName $tout.webapp_name --FunctionAppName $tout.functionapp_name --SqlServerName $tout.sqlserver_name --SynapseWorkspaceName $tout.synapse_workspace_name --SynapseDatabaseName $tout.synapse_sql_pool_name --SynapseSQLPoolName $tout.synapse_sql_pool_name --SynapseSparkPoolName $tout.synapse_spark_pool_name --PurviewAccountName $tout.purview_name --SynapseLakeDatabaseContainerName $tout.synapse_lakedatabase_container_name + + <# # Fix the MSI registrations on the other databases. 
I'd like a better way of doing this in the future + $SqlInstalled = false + try { + $SqlInstalled = Get-InstalledModule SqlServer + } + catch { "SqlServer PowerShell module not installed." } + + if ($null -eq $SqlInstalled) { + write-host "Installing SqlServer Module" + Install-Module -Name SqlServer -Scope CurrentUser -Force + } #> + + Set-Location $deploymentFolderPath + + if([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) + { + Write-Debug "Returning to $PathToReturnTo" + Set-Location $PathToReturnTo + } + else { + Write-Debug "Path to return to is null" + } + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 new file mode 100644 index 00000000..9b5be9e6 --- /dev/null +++ b/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 @@ -0,0 +1,51 @@ +function DeployDataFactoryAndSynapseArtefacts ( + [Parameter(Mandatory = $true)] + [pscustomobject]$tout = $false, + [Parameter(Mandatory = $true)] + [string]$deploymentFolderPath = "", + [Parameter(Mandatory = $true)] + [String]$PathToReturnTo = "" +) { + #---------------------------------------------------------------------------------------------------------------- + # Deploy Data Factory Pipelines + #---------------------------------------------------------------------------------------------------------------- + if ($skipDataFactoryPipelines) { + Write-Host "Skipping DataFactory Pipelines" + } + else { + #needed for git integration + az extension add --upgrade --name datafactory + + Set-Location $deploymentFolderPath + #Add Ip to SQL Firewall + $myIp = $env:TF_VAR_ip_address + $myIp2 = $env:TF_VAR_ip_address2 + + if ($myIp -ne $null) { + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "DeploymentAgent" --start-ip-address $myIp --end-ip-address $myIp + } + if ($myIp2 -ne $null) { + $result = az sql server firewall-rule create -g 
$tout.resource_group_name -s $tout.sqlserver_name -n "DeploymentUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + } + + $SqlInstalled = Get-InstalledModule SqlServer + if ($null -eq $SqlInstalled) { + write-host "Installing SqlServer Module" + Install-Module -Name SqlServer -Scope CurrentUser -Force + } + + Import-Module ./pwshmodules/GenerateAndUploadADFPipelines.psm1 -force + GenerateAndUploadDataFactoryAndSynapseArtefacts -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo + + if([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) + { + Write-Debug "Returning to $PathToReturnTo" + Set-Location $PathToReturnTo + } + else { + Write-Debug "Path to return to is null" + } + + + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/pwshmodules/GenerateAndUploadADFPipelines.psm1 b/solution/DeploymentV2/pwshmodules/GenerateAndUploadADFPipelines.psm1 new file mode 100644 index 00000000..108dc5d5 --- /dev/null +++ b/solution/DeploymentV2/pwshmodules/GenerateAndUploadADFPipelines.psm1 @@ -0,0 +1,46 @@ +function GenerateAndUploadDataFactoryAndSynapseArtefacts ( + [Parameter(Mandatory = $true)] + [pscustomobject]$tout = $false, + [Parameter(Mandatory = $true)] + [string]$deploymentFolderPath = "", + [Parameter(Mandatory = $true)] + [String]$PathToReturnTo = "" +) { + Set-Location $deploymentFolderPath + Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force + + Write-Host "Starting Adf Patterns" -ForegroundColor Yellow + Set-Location ../DataFactory/Patterns/ + Invoke-Expression ./Jsonnet_GenerateADFArtefacts.ps1 + + if ($tout.adf_git_toggle_integration) { + Invoke-Expression ./UploadGeneratedPatternsToGit.ps1 + } + else { + Invoke-Expression ./UploadGeneratedPatternsToADF.ps1 + } + Invoke-Expression ./UploadTaskTypeMappings.ps1 + #Below is temporary - we want to make a parent folder for the both of these directories in the future. + #Currently there are duplicate powershell scripts. 
Plan is to iterate through each subfolder (datafactory / synapse) with one script + Write-Host "Starting Synapse Parts" -ForegroundColor Yellow + Set-Location ../../Synapse/Patterns/ + Invoke-Expression ./Jsonnet_GenerateADFArtefacts.ps1 + if ($tout.synapse_git_toggle_integration) { + Invoke-Expression ./UploadGeneratedPatternsToGit.ps1 + } + else { + Invoke-Expression ./UploadGeneratedPatternsToADF.ps1 + Invoke-Expression ./uploadNotebooks.ps1 + } + Invoke-Expression ./UploadTaskTypeMappings.ps1 + + Set-Location $deploymentFolderPath + if([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) + { + Write-Debug "Returning to $PathToReturnTo" + Set-Location $PathToReturnTo + } + else { + Write-Debug "Path to return to is null" + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 b/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 new file mode 100644 index 00000000..ae0af689 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 @@ -0,0 +1,47 @@ +#---------------------------------------------------------------------------------------------------------------- +# You must be logged into the Azure CLI to run this script +#---------------------------------------------------------------------------------------------------------------- +# This script will: +# - Deploy the required AAD objects (Application Registrations etc) +# +# This is intended for creating a once off deployment from your development machine. You should setup the +# GitHub actions for your long term prod/non-prod environments +# +# Intructions +# - Ensure that you have run the Prepare.ps1 script first. This will prepare your azure subscription for deployment +# - Ensure that you have run az login and az account set +# - Ensure you have Contributor Access to the subscription you are deploying to. +# - Ensure you have Application.ReadWrite.OwnedBy on the Azure AD. +# - Run this script +# +# You can run this script multiple times if needed. 
+# +#---------------------------------------------------------------------------------------------------------------- +param ( + [Parameter(Mandatory=$false)] + [string]$FeatureTemplate="basic_deployment" +) + +#------------------------------------------------------------------------------------------------------------ +# Module Imports #Mandatory +#------------------------------------------------------------------------------------------------------------ +import-Module ./../pwshmodules/GatherOutputsFromTerraform.psm1 -force +import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force +#------------------------------------------------------------------------------------------------------------ +# Preparation #Mandatory +#------------------------------------------------------------------------------------------------------------ +$PathToReturnTo = (Get-Location).Path +$deploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../') +$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') +$ipaddress = $env:TF_VAR_ip_address +$ipaddress2 = $env:TF_VAR_ip_address2 + +PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo + +#------------------------------------------------------------------------------------------------------------ +# Main Terraform - Layer1 +#------------------------------------------------------------------------------------------------------------ +Write-Host "Starting Terraform Deployment- Layer 1" +terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure +terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl diff --git a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 new 
file mode 100644 index 00000000..9ad883c6 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 @@ -0,0 +1,47 @@ +#---------------------------------------------------------------------------------------------------------------- +# You must be logged into the Azure CLI to run this script +#---------------------------------------------------------------------------------------------------------------- +# This script will: +# - Deploy the required AAD objects (Application Registrations etc) +# +# This is intended for creating a once off deployment from your development machine. You should setup the +# GitHub actions for your long term prod/non-prod environments +# +# Intructions +# - Ensure that you have run the Prepare.ps1 script first. This will prepare your azure subscription for deployment +# - Ensure that you have run az login and az account set +# - Ensure you have Contributor Access to the subscription you are deploying to. +# - Ensure you have Application.ReadWrite.OwnedBy on the Azure AD. +# - Run this script +# +# You can run this script multiple times if needed. 
+# +#---------------------------------------------------------------------------------------------------------------- +param ( + [Parameter(Mandatory=$false)] + [string]$FeatureTemplate="basic_deployment" +) + +#------------------------------------------------------------------------------------------------------------ +# Module Imports #Mandatory +#------------------------------------------------------------------------------------------------------------ +import-Module ./../pwshmodules/GatherOutputsFromTerraform.psm1 -force +import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force +#------------------------------------------------------------------------------------------------------------ +# Preparation #Mandatory +#------------------------------------------------------------------------------------------------------------ +$PathToReturnTo = (Get-Location).Path +$deploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../') +$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') +$ipaddress = $env:TF_VAR_ip_address +$ipaddress2 = $env:TF_VAR_ip_address2 + +PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo + +#------------------------------------------------------------------------------------------------------------ +# Main Terraform - Layer1 +#------------------------------------------------------------------------------------------------------------ +Write-Host "Starting Terraform Deployment- Layer 2" +terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure +terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl diff --git a/solution/DeploymentV2/terraform_layer2/02-publish.ps1 b/solution/DeploymentV2/terraform_layer2/02-publish.ps1 
new file mode 100644 index 00000000..f0f9d91d --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/02-publish.ps1 @@ -0,0 +1,64 @@ +#---------------------------------------------------------------------------------------------------------------- +# You must be logged into the Azure CLI to run this script +#---------------------------------------------------------------------------------------------------------------- +# This script will: +# - Deploy the required AAD objects (Application Registrations etc) +# +# This is intended for creating a once off deployment from your development machine. You should setup the +# GitHub actions for your long term prod/non-prod environments +# +# Intructions +# - Ensure that you have run the Prepare.ps1 script first. This will prepare your azure subscription for deployment +# - Ensure that you have run az login and az account set +# - Ensure you have Contributor Access to the subscription you are deploying to. +# - Ensure you have Application.ReadWrite.OwnedBy on the Azure AD. +# - Run this script +# +# You can run this script multiple times if needed. 
+# +#---------------------------------------------------------------------------------------------------------------- +param ( + [Parameter(Mandatory=$false)] + [string]$FeatureTemplate="basic_deployment" +) + +#------------------------------------------------------------------------------------------------------------ +# Module Imports #Mandatory +#------------------------------------------------------------------------------------------------------------ +import-Module ./../pwshmodules/GatherOutputsFromTerraform.psm1 -force +import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force +#------------------------------------------------------------------------------------------------------------ +# Preparation #Mandatory +#------------------------------------------------------------------------------------------------------------ +$PathToReturnTo = (Get-Location).Path +$deploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../') + +$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') +$ipaddress = $env:TF_VAR_ip_address +$ipaddress2 = $env:TF_VAR_ip_address2 + +PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo + +#------------------------------------------------------------------------------------------------------------ +# Get Outputs #Mandatory +#------------------------------------------------------------------------------------------------------------ +$tout = GatherOutputsFromTerraform -TerraformFolderPath $PathToReturnTo + +#------------------------------------------------------------------------------------------------------------ +# Publish +#------------------------------------------------------------------------------------------------------------ +import-Module ./../pwshmodules/Deploy_5_WebApp.psm1 
-force +DeployWebApp -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo + +import-Module ./../pwshmodules/Deploy_6_FuncApp.psm1 -force +DeployFuncApp -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo + +import-Module ./../pwshmodules/Deploy_7_MetadataDB.psm1 -force +DeployMataDataDB -publish_metadata_database $true -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo + +import-Module ./../pwshmodules/Deploy_9_DataFactory.psm1 -force +DeployDataFactoryAndSynapseArtefacts -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo + +import-Module ./../pwshmodules/Deploy_10_SampleFiles.psm1 -force +DeploySampleFiles -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer3/.terraform.lock.hcl b/solution/DeploymentV2/terraform_layer3/.terraform.lock.hcl index 6d027bc6..8ca5626b 100644 --- a/solution/DeploymentV2/terraform_layer3/.terraform.lock.hcl +++ b/solution/DeploymentV2/terraform_layer3/.terraform.lock.hcl @@ -41,25 +41,6 @@ provider "registry.terraform.io/hashicorp/azurerm" { ] } -provider "registry.terraform.io/hashicorp/null" { - version = "3.1.1" - hashes = [ - "h1:71sNUDvmiJcijsvfXpiLCz0lXIBSsEJjMxljt7hxMhw=", - "zh:063466f41f1d9fd0dd93722840c1314f046d8760b1812fa67c34de0afcba5597", - "zh:08c058e367de6debdad35fc24d97131c7cf75103baec8279aba3506a08b53faf", - "zh:73ce6dff935150d6ddc6ac4a10071e02647d10175c173cfe5dca81f3d13d8afe", - "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", - "zh:8fdd792a626413502e68c195f2097352bdc6a0df694f7df350ed784741eb587e", - "zh:976bbaf268cb497400fd5b3c774d218f3933271864345f18deebe4dcbfcd6afa", - "zh:b21b78ca581f98f4cdb7a366b03ae9db23a73dfa7df12c533d7c19b68e9e72e5", - "zh:b7fc0c1615dbdb1d6fd4abb9c7dc7da286631f7ca2299fb9cd4664258ccfbff4", - 
"zh:d1efc942b2c44345e0c29bc976594cb7278c38cfb8897b344669eafbc3cddf46", - "zh:e356c245b3cd9d4789bab010893566acace682d7db877e52d40fc4ca34a50924", - "zh:ea98802ba92fcfa8cf12cbce2e9e7ebe999afbf8ed47fa45fc847a098d89468b", - "zh:eff8872458806499889f6927b5d954560f3d74bf20b6043409edf94d26cd906f", - ] -} - provider "registry.terraform.io/hashicorp/random" { version = "3.3.0" constraints = ">= 2.2.0, 3.3.0" diff --git a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 new file mode 100644 index 00000000..95ce3224 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 @@ -0,0 +1,47 @@ +#---------------------------------------------------------------------------------------------------------------- +# You must be logged into the Azure CLI to run this script +#---------------------------------------------------------------------------------------------------------------- +# This script will: +# - Deploy the required AAD objects (Application Registrations etc) +# +# This is intended for creating a once off deployment from your development machine. You should setup the +# GitHub actions for your long term prod/non-prod environments +# +# Intructions +# - Ensure that you have run the Prepare.ps1 script first. This will prepare your azure subscription for deployment +# - Ensure that you have run az login and az account set +# - Ensure you have Contributor Access to the subscription you are deploying to. +# - Ensure you have Application.ReadWrite.OwnedBy on the Azure AD. +# - Run this script +# +# You can run this script multiple times if needed. 
+# +#---------------------------------------------------------------------------------------------------------------- +param ( + [Parameter(Mandatory=$false)] + [string]$FeatureTemplate="basic_deployment" +) + +#------------------------------------------------------------------------------------------------------------ +# Module Imports #Mandatory +#------------------------------------------------------------------------------------------------------------ +import-Module ./../pwshmodules/GatherOutputsFromTerraform.psm1 -force +import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force +#------------------------------------------------------------------------------------------------------------ +# Preparation #Mandatory +#------------------------------------------------------------------------------------------------------------ +$PathToReturnTo = (Get-Location).Path +$deploymentFolderPath = (Get-Location).Path + './../' +$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') +$ipaddress = $env:TF_VAR_ip_address +$ipaddress2 = $env:TF_VAR_ip_address2 + +PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo + +#------------------------------------------------------------------------------------------------------------ +# Main Terraform - Layer1 +#------------------------------------------------------------------------------------------------------------ +Write-Host "Starting Terraform Deployment- Layer 3" +terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure +terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl diff --git a/solution/DeploymentV2/terraform_layer3/app_service.tf b/solution/DeploymentV2/terraform_layer3/app_service.tf.bak similarity index 100% 
rename from solution/DeploymentV2/terraform_layer3/app_service.tf rename to solution/DeploymentV2/terraform_layer3/app_service.tf.bak diff --git a/solution/DeploymentV2/terraform_layer3/database.ps1 b/solution/DeploymentV2/terraform_layer3/database.ps1 index bcc3bd75..5decd6e8 100644 --- a/solution/DeploymentV2/terraform_layer3/database.ps1 +++ b/solution/DeploymentV2/terraform_layer3/database.ps1 @@ -1,35 +1,129 @@ param ( - [Parameter(Mandatory=$true)] - [string]$user="", - [Parameter(Mandatory=$true)] - [string]$sqlserver_name="", - [Parameter(Mandatory=$true)] - [string]$database="" + [Parameter(Mandatory = $true)] + [pscustomobject]$tout = $false, + [Parameter(Mandatory = $true)] + [string]$deploymentFolderPath = "", + [Parameter(Mandatory = $true)] + [String]$PathToReturnTo = "", + [Parameter(Mandatory = $true)] + [bool]$PublishSQLLogins ) -$token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) +#---------------------------------------------------------------------------------------------------------------- +# Configure SQL Server Logins +#---------------------------------------------------------------------------------------------------------------- +if($PublishSQLLogins -eq $false) { + Write-Host "Skipping configuration of SQL Server Users" +} +else { + Write-Host "Configuring SQL Server Users" + $databases = @($tout.stagingdb_name, $tout.sampledb_name, $tout.metadatadb_name) -$sqlcommand = " -IF '$user' = 'sql_aad_admin' -BEGIN - GOTO ExitLabel -END + $aadUsers = @($tout.datafactory_name,$tout.functionapp_name, $tout.webapp_name ) + + if($env:TF_VAR_deploy_purview -eq $true) + { + $aadUsers += ($tout.purview_name) + #$aadUsers += ($tout.purview_sp_name) + } + + $sqladmins = ($env:TF_VAR_azure_sql_aad_administrators | ConvertFrom-Json -Depth 10) + $sqladmins2 = ($Sqladmins | Get-Member) | Where-Object {$_.MemberType -eq "NoteProperty"} | Select-Object -Property Name + foreach($user in $sqladmins2) + { 
+ if($user.Name -ne "sql_aad_admin") + { + $aadUsers += $user.Name + } + } -IF NOT EXISTS (SELECT * -FROM [sys].[database_principals] -WHERE [type] = N'E' AND [name] = N'$user') -BEGIN - CREATE USER [$user] FROM EXTERNAL PROVIDER; -END -ALTER ROLE db_datareader ADD MEMBER [$user]; -ALTER ROLE db_datawriter ADD MEMBER [$user]; -GRANT EXECUTE ON SCHEMA::[dbo] TO [$user]; + $token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) + foreach($database in $databases) + { + + foreach($user in $aadUsers) + { + if (![string]::IsNullOrEmpty($user)) + { + $sqlcommand = " + + IF NOT EXISTS (SELECT * + FROM [sys].[database_principals] + WHERE [type] = N'E' AND [name] = N'$user') + BEGIN + CREATE USER [$user] FROM EXTERNAL PROVIDER; + END + ALTER ROLE db_datareader ADD MEMBER [$user]; + ALTER ROLE db_datawriter ADD MEMBER [$user]; + GRANT EXECUTE ON SCHEMA::[dbo] TO [$user]; + GO + + " + + write-host "Granting MSI Privileges on $database DB to $user" + Invoke-Sqlcmd -ServerInstance "$($tout.sqlserver_name).database.windows.net,1433" -Database $database -AccessToken $token -query $sqlcommand + } + } + } + + $ddlCommand = "ALTER ROLE db_ddladmin ADD MEMBER [$($tout.datafactory_name)];" + foreach($database in $databases) + { + write-host "Granting DDL Role on $database DB to $($tout.datafactory_name)" + Invoke-Sqlcmd -ServerInstance "$($tout.sqlserver_name).database.windows.net,1433" -Database $database -AccessToken $token -query $ddlCommand + } + +} + +#---------------------------------------------------------------------------------------------------------------- +# Configure Synapse Logins +#---------------------------------------------------------------------------------------------------------------- +if($PublishSQLLogins -eq $false) { + Write-Host "Skipping configuration of Synapse SQL Users" +} +else { + Write-Host "Configuring Synapse SQL Users" + + $myIp = $env:TF_VAR_ip_address + $myIp2 = $env:TF_VAR_ip_address2 + + 
#Add Ip to SQL Firewall + #$result = az synapse workspace update -n $synapse_workspace_name -g $resource_group_name --set publicNetworkAccess="Enabled" + $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "DeploymentAgent" --start-ip-address $myIp --end-ip-address $myIp + $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "DeploymentUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + + if ($tout.is_vnet_isolated -eq $false) + { + $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "AllowAllWindowsAzureIps" --start-ip-address "0.0.0.0" --end-ip-address "0.0.0.0" + } + + if([string]::IsNullOrEmpty($tout.synapse_sql_pool_name) ) + { + write-host "Synapse pool is not deployed." + } + else + { + # Fix the MSI registrations on the other databases. I'd like a better way of doing this in the future + $SqlInstalled = Get-InstalledModule SqlServer + if($null -eq $SqlInstalled) + { + write-host "Installing SqlServer Module" + Install-Module -Name SqlServer -Scope CurrentUser -Force + } -ExitLabel: -GO -" -write-host "Granting MSI Privileges on $database DB to $user" -Invoke-Sqlcmd -ServerInstance "$sqlserver_name.database.windows.net,1433" -Database $database -AccessToken $token -query $sqlcommand \ No newline at end of file + $token=$(az account get-access-token --resource=https://sql.azuresynapse.net --query accessToken --output tsv) + if ((![string]::IsNullOrEmpty($tout.datafactory_name)) -and ($tout.synapse_sql_pool_name -ne 'Dummy') -and (![string]::IsNullOrEmpty($tout.synapse_sql_pool_name))) + { + # For a Spark user to read and write directly from Spark into or from a SQL pool, db_owner permission is required. 
+ Invoke-Sqlcmd -ServerInstance "$($tout.synapse_workspace_name).sql.azuresynapse.net,1433" -Database $tout.synapse_sql_pool_name -AccessToken $token -query "IF NOT EXISTS (SELECT name + FROM [sys].[database_principals] + WHERE [type] = 'E' AND name = N'$($tout.datafactory_name)') BEGIN CREATE USER [$($tout.datafactory_name)] FROM EXTERNAL PROVIDER END" + Invoke-Sqlcmd -ServerInstance "$($tout.synapse_workspace_name).sql.azuresynapse.net,1433" -Database $tout.synapse_sql_pool_name -AccessToken $token -query "EXEC sp_addrolemember 'db_owner', '$($tout.datafactory_name)'" + } + } + + +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer3/database.tf b/solution/DeploymentV2/terraform_layer3/database.tf deleted file mode 100644 index 3d31eb19..00000000 --- a/solution/DeploymentV2/terraform_layer3/database.tf +++ /dev/null @@ -1,93 +0,0 @@ -resource "null_resource" "metadatadb_admins" { - for_each = (var.azure_sql_aad_administrators) - provisioner "local-exec" { - working_dir = path.module - command = "pwsh -file database.ps1 -user ${each.key} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.metadatadb_name}" - } - -} - -resource "null_resource" "stagingdb_admins" { - for_each = (var.azure_sql_aad_administrators) - provisioner "local-exec" { - working_dir = path.module - command = "pwsh -file database.ps1 -user ${each.key} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" - } - -} - -resource "null_resource" "sampledb_admins" { - for_each = (var.azure_sql_aad_administrators) - provisioner "local-exec" { - working_dir = path.module - command = "pwsh -file database.ps1 -user ${each.key} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" - } - -} - -//Puview -resource 
"null_resource" "purview_access_sampledb" { - count = var.deploy_purview ? 1 : 0 - provisioner "local-exec" { - working_dir = path.module - command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" - } - -} - -resource "null_resource" "purview_access_stagingdb" { - count = var.deploy_purview ? 1 : 0 - provisioner "local-exec" { - working_dir = path.module - command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" - } - -} - -//Puview SP -resource "null_resource" "purview_sp_access_sampledb" { - count = var.deploy_purview ? 1 : 0 - provisioner "local-exec" { - working_dir = path.module - command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_sp_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" - } - -} - -resource "null_resource" "purview_sp_access_stagingdb" { - count = var.deploy_purview ? 1 : 0 - provisioner "local-exec" { - working_dir = path.module - command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_sp_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" - } - -} - -//Data Factory -resource "null_resource" "datafactory_access_sampledb" { - count = var.deploy_data_factory ? 
1 : 0 - provisioner "local-exec" { - working_dir = path.module - command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.datafactory_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" - } - -} - -resource "null_resource" "datafactory_access_stagingdb" { - count = var.deploy_data_factory ? 1 : 0 - provisioner "local-exec" { - working_dir = path.module - command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.datafactory_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" - } - -} - -resource "null_resource" "datafactory_access_metadatadb" { - count = var.deploy_data_factory ? 1 : 0 - provisioner "local-exec" { - working_dir = path.module - command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.datafactory_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.metadatadb_name}" - } - -} - diff --git a/solution/DeploymentV2/terraform_layer3/database.tf.bak b/solution/DeploymentV2/terraform_layer3/database.tf.bak new file mode 100644 index 00000000..a94ae362 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/database.tf.bak @@ -0,0 +1,137 @@ +/*Metadatadb Admins*/ +resource "null_resource" "metadatadb_admins" { + /* triggers = { + always_run = "${timestamp()}" + } */ + + for_each = (var.azure_sql_aad_administrators) + provisioner "local-exec" { + command = ".'${path.module}/database.ps1' -user \"${each.key}\" -sqlserver_name \"${data.terraform_remote_state.layer2.outputs.sqlserver_name}\" -database \"${data.terraform_remote_state.layer2.outputs.metadatadb_name}\"" + interpreter = ["pwsh", "-Command"] + } + +} + +resource "null_resource" "metadatadb_admins_functionapp" { + /* triggers = { 
+ always_run = "${timestamp()}" + } */ + provisioner "local-exec" { + command = ".'${path.module}/database.ps1' -user \"${data.terraform_remote_state.layer2.outputs.functionapp_name}\" -sqlserver_name \"${data.terraform_remote_state.layer2.outputs.sqlserver_name}\" -database \"${data.terraform_remote_state.layer2.outputs.metadatadb_name}\"" + interpreter = ["pwsh", "-Command"] + } +} + +resource "null_resource" "metadatadb_admins_webapp" { + /* triggers = { + always_run = "${timestamp()}" + } */ + provisioner "local-exec" { + command = ".'${path.module}/database.ps1' -user \"${data.terraform_remote_state.layer2.outputs.functionapp_name}\" -sqlserver_name \"${data.terraform_remote_state.layer2.outputs.sqlserver_name}\" -database \"${data.terraform_remote_state.layer2.outputs.metadatadb_name }\"" + interpreter = ["pwsh", "-Command"] + } +} + +resource "null_resource" "metadatadb_admins_datafactory" { + /* triggers = { + always_run = "${timestamp()}" + } */ + provisioner "local-exec" { + command = ".'${path.module}/database.ps1' -user \"${data.terraform_remote_state.layer2.outputs.functionapp_name}\" -sqlserver_name \"${data.terraform_remote_state.layer2.outputs.sqlserver_name}\" -database \"${data.terraform_remote_state.layer2.outputs.metadatadb_name }\"" + interpreter = ["pwsh", "-Command"] + } +} + +resource "null_resource" "stagingdb_admins" { + /* triggers = { + always_run = "${timestamp()}" + } */ + for_each = (var.azure_sql_aad_administrators) + provisioner "local-exec" { + command = ".'${path.module}/database.ps1' -user \"${each.key}\" -sqlserver_name \"${data.terraform_remote_state.layer2.outputs.sqlserver_name}\" -database \"${data.terraform_remote_state.layer2.outputs.stagingdb_name}\"" + interpreter = ["pwsh", "-Command"] + } + +} + +resource "null_resource" "sampledb_admins" { + /* triggers = { + always_run = "${timestamp()}" + } */ + for_each = (var.azure_sql_aad_administrators) + provisioner "local-exec" { + command = ".'${path.module}/database.ps1' 
-user \"${each.key}\" -sqlserver_name \"${data.terraform_remote_state.layer2.outputs.sqlserver_name}\" -database \"${data.terraform_remote_state.layer2.outputs.sampledb_name}\"" + interpreter = ["pwsh", "-Command"] + } + +} + +//Puview +resource "null_resource" "purview_access_sampledb" { + /* triggers = { + always_run = "${timestamp()}" + } */ + count = var.deploy_purview ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" + } + +} + +resource "null_resource" "purview_access_stagingdb" { + count = var.deploy_purview ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" + } + +} + +//Puview SP +resource "null_resource" "purview_sp_access_sampledb" { + count = var.deploy_purview ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_sp_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" + } + +} + +resource "null_resource" "purview_sp_access_stagingdb" { + count = var.deploy_purview ? 
1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.purview_sp_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" + } + +} + +//Data Factory +resource "null_resource" "datafactory_access_sampledb" { + count = var.deploy_data_factory ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.datafactory_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.sampledb_name}" + } + +} + +resource "null_resource" "datafactory_access_stagingdb" { + count = var.deploy_data_factory ? 1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.datafactory_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.stagingdb_name}" + } + +} + +resource "null_resource" "datafactory_access_metadatadb" { + count = var.deploy_data_factory ? 
1 : 0 + provisioner "local-exec" { + working_dir = path.module + command = "pwsh -file database.ps1 -user ${data.terraform_remote_state.layer2.outputs.datafactory_name} -sqlserver_name ${data.terraform_remote_state.layer2.outputs.sqlserver_name} -database ${data.terraform_remote_state.layer2.outputs.metadatadb_name}" + } + +} + diff --git a/solution/DeploymentV2/terraform_layer3/redo_sql_users.ps1 b/solution/DeploymentV2/terraform_layer3/redo_sql_users.ps1 new file mode 100644 index 00000000..13f1773d --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/redo_sql_users.ps1 @@ -0,0 +1,11 @@ +$state = terraform state list +foreach ($l in $state) { + if($l.StartsWith("null_resource.")) + { + #$name = $l.replace("""",'\"') + $name = $l + $cmd = "terraform state rm '$name'" + Write-Host $cmd + $cmd | bash + } +} \ No newline at end of file diff --git a/solution/Synapse/Patterns/UploadTaskTypeMappings.ps1 b/solution/Synapse/Patterns/UploadTaskTypeMappings.ps1 index b33df834..dbf03a9b 100644 --- a/solution/Synapse/Patterns/UploadTaskTypeMappings.ps1 +++ b/solution/Synapse/Patterns/UploadTaskTypeMappings.ps1 @@ -26,6 +26,6 @@ foreach ($pattern in ($patterns.Folder | Sort-Object | Get-Unique)) Write-Information "_____________________________" $sqlcommand = (Get-Content $file -raw) $token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) - Invoke-Sqlcmd -ServerInstance "$sqlserver_name.database.windows.net,1433" -Database $metadatadb_name -AccessToken $token -query $sqlcommand + Invoke-Sqlcmd -ServerInstance "$($tout.sqlserver_name).database.windows.net,1433" -Database $metadatadb_name -AccessToken $token -query $sqlcommand } From 4f0b739391a0e329f5f68dc2c316a21663e507eb Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 30 Jul 2022 13:09:16 +0800 Subject: [PATCH 068/151] Fixed Issue with SQL Users still being added in layer 2 instead of layer3 --- .github/workflows/02.continuous-delivery.yml | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/02.continuous-delivery.yml b/.github/workflows/02.continuous-delivery.yml index ee2f7ee3..50cb6665 100644 --- a/.github/workflows/02.continuous-delivery.yml +++ b/.github/workflows/02.continuous-delivery.yml @@ -90,7 +90,7 @@ jobs: - name: Terragrunt Install id: terragrunt_install - working-directory: ./solution/DeploymentV2/terraform/terraform_layer2 + working-directory: ./solution/DeploymentV2/terraform_layer2 run: | brew install terragrunt From 3d85afd0a32f6bc17faa1261db2a0a72b1afeb4b Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 30 Jul 2022 13:13:34 +0800 Subject: [PATCH 069/151] Fixed Issue with SQL Users still being added in layer 2 instead of layer3 --- .github/workflows/02.continuous-delivery.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/02.continuous-delivery.yml b/.github/workflows/02.continuous-delivery.yml index 50cb6665..30582b78 100644 --- a/.github/workflows/02.continuous-delivery.yml +++ b/.github/workflows/02.continuous-delivery.yml @@ -110,8 +110,8 @@ jobs: env: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | - git update-index --chmod=+x ./02_deploy.ps1 - ./02_deploy.ps1 + git update-index --chmod=+x ./02-deploy.ps1 + ./02-deploy.ps1 - name: Build and Publish Code Artefacts id: solution-deployment-code @@ -120,8 +120,8 @@ jobs: env: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | - git update-index --chmod=+x ./02_publish.ps1 - ./02_publish.ps1 + git update-index --chmod=+x ./02-publish.ps1 + ./02-publish.ps1 #PROD ENVIRONMENT deploy-to-env-two: From c28853872bcd2a756b5d030e4f808136da7fef2e Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 30 Jul 2022 13:48:14 +0800 Subject: [PATCH 070/151] Fixed Issue with SQL Users still being added in layer 2 instead of layer3 --- solution/DeploymentV2/terraform_layer2/synapse.tf | 3 +++ 1 file changed, 3 insertions(+) diff --git 
a/solution/DeploymentV2/terraform_layer2/synapse.tf b/solution/DeploymentV2/terraform_layer2/synapse.tf index b52245d3..51e69dc2 100644 --- a/solution/DeploymentV2/terraform_layer2/synapse.tf +++ b/solution/DeploymentV2/terraform_layer2/synapse.tf @@ -173,6 +173,9 @@ resource "azurerm_synapse_role_assignment" "synapse_admin_assignments" { synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id role_name = "Synapse Administrator" principal_id = each.value + lifecycle { + ignore_changes = all + } depends_on = [ azurerm_synapse_firewall_rule.public_access, time_sleep.azurerm_synapse_firewall_rule_wait_30_seconds_cicd From f9031be2f17e5d8f92d17bc7ed72a5702f0c4c85 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 31 Jul 2022 07:17:39 +0800 Subject: [PATCH 071/151] modified: solution/DeploymentV2/environments/vars/common_vars_template.jsonnet modified: solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc --- .../environments/vars/common_vars_template.jsonnet | 4 ++++ .../environments/vars/staging/common_vars_values.jsonc | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 7487493b..b93e7af3 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -487,6 +487,10 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 45c40efc..e673f6f1 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -6,7 +6,7 @@ "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", - "resource_group_name": "adf1", + "resource_group_name": "adf2", "ip_address": "144.138.148.220", "ip_address2": "144.138.148.220", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", From 9e46414cb403ff9e3479a2024749f591b45c56d3 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 31 Jul 2022 09:07:58 +0800 Subject: [PATCH 072/151] Added Ability to Suppress Environment Variable population during gitDeploy --- .../vars/common_vars_template.jsonnet | 119 ++++++++++++------ 1 file changed, 79 insertions(+), 40 deletions(-) diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index b93e7af3..06530d29 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -1,4 +1,4 @@ -function (featuretemplatename="full_deployment",environment="staging") +function (featuretemplatename="full_deployment",environment="staging", gitDeploy=false) local locals = { /*DONOTREMOVETHISCOMMENT:ENVS*/ 'admz' : import './admz/common_vars_values.jsonc', @@ -23,6 +23,7 @@ local featuretemplate = [ // Object comprehension. 
["EnvVarName"]: "TF_VAR_" + sd.Name, ["HCLName"]: "", ["Value"]: sd.Value, + ["DoNotReplaceDuringAgentDeployment"]: false } for sd in featuretemplates[featuretemplatename] ]; @@ -43,28 +44,32 @@ local AllVariables = [ "EnvVarName": "WEB_APP_ADMIN_USER", "HCLName": "", "Value": locals[environment].WEB_APP_ADMIN_USER, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_SYNAPSE_WORKSPACE_NAME", "EnvVarName": "ARM_SYNAPSE_WORKSPACE_NAME", "HCLName": "", "Value": locals[environment].ARM_SYNAPSE_WORKSPACE_NAME, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_KEYVAULT_NAME", "EnvVarName": "keyVaultName", "HCLName": "", "Value": locals[environment].ARM_KEYVAULT_NAME, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_DATALAKE_NAME", "EnvVarName": "datalakeName", "HCLName": "", "Value": locals[environment].ARM_DATALAKE_NAME, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, /* Required for Automated CICD Deployment @@ -74,42 +79,48 @@ local AllVariables = [ "EnvVarName": "ARM_CLIENT_ID", "HCLName": "", "Value": "#####", - "Sensitive": true + "Sensitive": true, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_PAL_PARTNER_ID", "EnvVarName": "ARM_PAL_PARTNER_ID", "HCLName": "", "Value": locals[environment].ARM_PAL_PARTNER_ID, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_CLIENT_SECRET", "EnvVarName": "ARM_CLIENT_SECRET", "HCLName": "", "Value": "#####", - "Sensitive": true + "Sensitive": true, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_SUBSCRIPTION_ID", "EnvVarName": "ARM_SUBSCRIPTION_ID", "HCLName": "", "Value": locals[environment].subscription_id, - "Sensitive": false + "Sensitive": false, + 
"DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_TENANT_ID", "EnvVarName": "ARM_TENANT_ID", "HCLName": "tenant_id", "Value": locals[environment].tenant_id, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "", "EnvVarName": "TF_VAR_tenant_id", "HCLName": "", "Value": locals[environment].tenant_id, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, /* @@ -120,126 +131,144 @@ local AllVariables = [ "EnvVarName": "", "HCLName": "owner_tag", "Value": locals[environment].owner_tag, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "", "EnvVarName": "", "HCLName": "deployment_principal_layers1and3", "Value": locals[environment].deployment_principal_layers1and3, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "", "EnvVarName": "", "HCLName": "resource_location", "Value": locals[environment].resource_location, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ENVIRONMENT_TAG", "EnvVarName": "TF_VAR_environment_tag", "HCLName": "environment_tag", "Value": locals[environment].environment_tag, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_DOMAIN", "EnvVarName": "TF_VAR_domain", "HCLName": "domain", "Value": locals[environment].domain, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "", "EnvVarName": "TF_VAR_subscription_id", "HCLName": "subscription_id", "Value": locals[environment].subscription_id, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "", "EnvVarName": "", "HCLName": "prefix", "Value": locals[environment].prefix, - "Sensitive": false + "Sensitive": false, + 
"DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_RESOURCE_GROUP_NAME", "EnvVarName": "TF_VAR_resource_group_name", "HCLName": "resource_group_name", "Value": locals[environment].resource_group_name, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_IP_ADDRESS", "EnvVarName": "TF_VAR_ip_address", "HCLName": "ip_address", "Value": locals[environment].ip_address, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":true }, { "CICDSecretName": "ARM_IP_ADDRESS2", "EnvVarName": "TF_VAR_ip_address2", "HCLName": "ip_address2", "Value": locals[environment].ip_address2, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "", "EnvVarName": "", "HCLName": "synapse_administrators", "Value": locals[environment].synapse_administrators, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "", "EnvVarName": "TF_VAR_azure_sql_aad_administrators", "HCLName": "azure_sql_aad_administrators", "Value": locals[environment].azure_sql_aad_administrators, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "", "EnvVarName": "", "HCLName": "resource_owners", "Value": locals[environment].resource_owners, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_FEATURE_TEMPLATE", "EnvVarName": "ARM_FEATURE_TEMPLATE", "HCLName": "", "Value": featuretemplatename, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_STORAGE_NAME", "EnvVarName": "TF_VAR_state_storage_account_name", "HCLName": "", "Value": locals[environment].resource_group_name + "state", - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_SYNAPSE_PASSWORD", 
"EnvVarName": "TF_VAR_synapse_sql_password", "HCLName": "", "Value": "#####", - "Sensitive": true + "Sensitive": true, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "ARM_JUMPHOST_PASSWORD", "EnvVarName": "TF_VAR_jumphost_password", "HCLName": "", "Value": "#####", - "Sensitive": true + "Sensitive": true, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "WEB_APP_ADMIN_SECURITY_GROUP", "EnvVarName": "TF_VAR_web_app_admin_security_group", "HCLName": "", "Value": locals[environment].WEB_APP_ADMIN_SECURITY_GROUP, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, /* Git Integration Set-Up @@ -249,70 +278,80 @@ local AllVariables = [ "EnvVarName": "TF_VAR_synapse_git_repository_name", "HCLName": "", "Value": locals[environment].GIT_REPOSITORY_NAME, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME", "EnvVarName": "TF_VAR_synapse_git_repository_branch_name", "HCLName": "", "Value": locals[environment].GIT_SYNAPSE_REPOSITORY_BRANCH_NAME, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "GIT_PAT", "EnvVarName": "TF_VAR_synapse_git_pat", "HCLName": "", "Value": "#####", - "Sensitive": true + "Sensitive": true, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "GIT_USER_NAME", "EnvVarName": "TF_VAR_synapse_git_user_name", "HCLName": "", "Value": locals[environment].GIT_USER_NAME, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "GIT_EMAIL_ADDRESS", "EnvVarName": "TF_VAR_synapse_git_email_address", "HCLName": "", "Value": locals[environment].GIT_EMAIL_ADDRESS, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "GIT_ADF_REPOSITORY_NAME", "EnvVarName": "TF_VAR_adf_git_repository_name", "HCLName": "", "Value": 
locals[environment].GIT_ADF_REPOSITORY_NAME, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "GIT_ADF_REPOSITORY_BRANCH_NAME", "EnvVarName": "TF_VAR_adf_git_repository_branch_name", "HCLName": "", "Value": locals[environment].GIT_ADF_REPOSITORY_BRANCH_NAME, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "GIT_ADF_PAT", "EnvVarName": "TF_VAR_adf_git_pat", "HCLName": "", "Value": "#####", - "Sensitive": true + "Sensitive": true, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "GIT_ADF_USER_NAME", "EnvVarName": "TF_VAR_adf_git_user_name", "HCLName": "", "Value": locals[environment].GIT_ADF_USER_NAME, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false }, { "CICDSecretName": "GIT_ADF_EMAIL_ADDRESS", "EnvVarName": "TF_VAR_adf_git_email_address", "HCLName": "", "Value": locals[environment].GIT_ADF_EMAIL_ADDRESS, - "Sensitive": false + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false } ]+featuretemplate; @@ -327,7 +366,7 @@ local HCLVariables = { // Object comprehension. local EnvironmentVariables = { // Object comprehension. [sd.EnvVarName]: sd.Value for sd in AllVariables - if sd.EnvVarName != "" + if sd.EnvVarName != "" && ((gitDeploy == false) || (gitDeploy == true && sd.DoNotReplaceDuringAgentDeployment == false)) }; local SecretFileVars = { // Object comprehension. 
From bc777c3726baa54c936968356db0f4f81674c4f4 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 31 Jul 2022 09:17:01 +0800 Subject: [PATCH 073/151] Added gitDeploy to Jsonnet generation --- .../DeploymentV2/environments/vars/PreprocessEnvironment.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index bdd597ac..3fb19681 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -50,7 +50,7 @@ Write-Host "Preparing Environment: $Environment Using $FeatureTemplate Template" $newfolder = "./../../bin/environments/$Environment/" $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Name $newfolder) : ($F = "") -(jsonnet "./common_vars_template.jsonnet" --tla-str featuretemplatename=$FeatureTemplate --tla-str environment=$Environment ) | Set-Content($newfolder +"/common_vars.json") +(jsonnet "./common_vars_template.jsonnet" --tla-str featuretemplatename=$FeatureTemplate --tla-str environment=$Environment --tla-str gitDeploy=$gitDeploy ) | Set-Content($newfolder +"/common_vars.json") $obj = Get-Content ($newfolder + "/common_vars.json") | ConvertFrom-Json foreach($t in ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"})) From b3ffc5a947ef2c0bb2b2b53b71da06c57c65c830 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 31 Jul 2022 09:30:32 +0800 Subject: [PATCH 074/151] CICD Debug --- solution/DeploymentV2/terraform_layer2/02-deploy.ps1 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 index 9ad883c6..084aea20 100644 --- a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 @@ -33,8 +33,10 @@ 
import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force $PathToReturnTo = (Get-Location).Path $deploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../') $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +Write-Host "gitDeploy: " & $gitDeploy.ToString() $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') $ipaddress = $env:TF_VAR_ip_address +Write-Host "ipaddress: " & $ipaddress.ToString() $ipaddress2 = $env:TF_VAR_ip_address2 PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo From e2330cbc0731f8a1f80195098d0f6c9d33c428ba Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 31 Jul 2022 09:41:09 +0800 Subject: [PATCH 075/151] Removed Synapse Workspace Creator --- .../DeploymentV2/environments/vars/common_vars_schema.json | 2 +- .../environments/vars/staging/common_vars_values.jsonc | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/solution/DeploymentV2/environments/vars/common_vars_schema.json b/solution/DeploymentV2/environments/vars/common_vars_schema.json index 5355ed38..068996d6 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_schema.json +++ b/solution/DeploymentV2/environments/vars/common_vars_schema.json @@ -92,7 +92,7 @@ "required": [], "properties": {}, "default": {}, - "description": "Object with a property for each user that is to be a Synapse Administrator. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use.", + "description": "Object with a property for each user that is to be a Synapse Administrator. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. 
NOTE - do not include the Synpase Workspace creator in this list as it will be added automatically.", "examples": [ { "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index e673f6f1..5090821e 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -20,8 +20,8 @@ }, "azure_sql_aad_administrators": { "sql_aad_admin": "4c732d19-4076-4a76-87f3-6fbfd77f007d", - "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "AdsGoFastDeployerjkcgkaibkungm": "4c732d19-4076-4a76-87f3-6fbfd77f007d" + "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578"//, + //"AdsGoFastDeployerjkcgkaibkungm": "4c732d19-4076-4a76-87f3-6fbfd77f007d" }, "synapse_publishers": {}, "synapse_contributors": {}, From 9f071faca655e263686f75bc24ff86acdec4aeb9 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 31 Jul 2022 10:11:22 +0800 Subject: [PATCH 076/151] Removed Deploy Agent from Synapse Admins as it should be added auto --- .../environments/vars/staging/common_vars_values.jsonc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 5090821e..b2cae593 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -16,7 +16,8 @@ "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" ], "synapse_administrators": { - "deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d" + //"deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d"//, + "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "azure_sql_aad_administrators": { "sql_aad_admin": 
"4c732d19-4076-4a76-87f3-6fbfd77f007d", From 2ee5632002cd817b54161c48398aa3312c68d730 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 1 Aug 2022 11:18:42 +0800 Subject: [PATCH 077/151] CICD Test --- .../vars/common_vars_template.jsonnet | 20 +++++++++++++++-- .../vars/staging/common_vars_values.jsonc | 6 ++--- .../pwshmodules/Deploy_0_Prep.psm1 | 22 +++++++++++++++++++ .../terraform_layer2/02-deploy.ps1 | 2 -- .../DeploymentV2/terraform_layer3/layer2.tf | 4 ++-- 5 files changed, 45 insertions(+), 9 deletions(-) diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 06530d29..900abab3 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -1,4 +1,4 @@ -function (featuretemplatename="full_deployment",environment="staging", gitDeploy=false) +function (featuretemplatename="full_deployment",environment="staging", gitDeploy="False") local locals = { /*DONOTREMOVETHISCOMMENT:ENVS*/ 'admz' : import './admz/common_vars_values.jsonc', @@ -366,7 +366,7 @@ local HCLVariables = { // Object comprehension. local EnvironmentVariables = { // Object comprehension. [sd.EnvVarName]: sd.Value for sd in AllVariables - if sd.EnvVarName != "" && ((gitDeploy == false) || (gitDeploy == true && sd.DoNotReplaceDuringAgentDeployment == false)) + if sd.EnvVarName != "" && ((std.asciiLower(gitDeploy) == "false") || (std.asciiLower(gitDeploy) == "true" && sd.DoNotReplaceDuringAgentDeployment == false)) }; local SecretFileVars = { // Object comprehension. @@ -518,6 +518,22 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index b2cae593..d79e8adc 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -28,9 +28,9 @@ "synapse_contributors": {}, "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "WEB_APP_ADMIN_SECURITY_GROUP": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadslwra", - "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-lwra", - "ARM_DATALAKE_NAME": "adsstgdlsadslwraadsl", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadshqve", + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-hqve", + "ARM_DATALAKE_NAME": "adsstgdlsadshqveadsl", "ARM_PAL_PARTNER_ID": "0", "GIT_REPOSITORY_NAME": "#####", "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "#####", diff --git a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 index 46b1c6a5..a45fef9f 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 @@ -75,6 +75,28 @@ function PrepareDeployment ( [System.Environment]::SetEnvironmentVariable('TFenvironmentName',$environmentName) + try + { + $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 + $hiddenoutput = az synapse workspace firewall-rule create --name AllowCICD --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address + } + catch + { + Write-Warning 'Opening Firewalls for IP Address One Failed' + } 
+ + try + { + $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 + $hiddenoutput = az synapse workspace firewall-rule create --name AllowCICD --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address2 + } + catch + { + Write-Warning 'Opening Firewalls for IP Address Two Failed' + } + if([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) { Write-Debug "Returning to $PathToReturnTo" diff --git a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 index 084aea20..9ad883c6 100644 --- a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 @@ -33,10 +33,8 @@ import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force $PathToReturnTo = (Get-Location).Path $deploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../') $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') -Write-Host "gitDeploy: " & $gitDeploy.ToString() $skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') $ipaddress = $env:TF_VAR_ip_address -Write-Host "ipaddress: " & $ipaddress.ToString() $ipaddress2 = $env:TF_VAR_ip_address2 PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index 8a4d6cb9..a4e1c95f 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ 
b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer2.tfstate" - resource_group_name = "adf1" - storage_account_name = "adf1state" + resource_group_name = "adf2" + storage_account_name = "adf2state" } } From b5cd3aeb029699dd58bf89534a41e17b5e02a6be Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 1 Aug 2022 18:02:29 +1000 Subject: [PATCH 078/151] Fixed issue with CICD IP Address --- solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 index a45fef9f..41209fa7 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 @@ -78,7 +78,7 @@ function PrepareDeployment ( try { $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 - $hiddenoutput = az synapse workspace firewall-rule create --name AllowCICD --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME + $hiddenoutput = az synapse workspace firewall-rule create --name AllowCICD_Agent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address } catch @@ -89,7 +89,7 @@ function PrepareDeployment ( try { $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 - $hiddenoutput = az synapse workspace firewall-rule 
create --name AllowCICD --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME + $hiddenoutput = az synapse workspace firewall-rule create --name AllowCICD_User --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address2 } catch @@ -106,4 +106,4 @@ function PrepareDeployment ( Write-Debug "Path to return to is null" } -} \ No newline at end of file +} From 6110056be46fc9e658d3714e4962a1a4aa6d4c8f Mon Sep 17 00:00:00 2001 From: "jorampon@microsoft.com" Date: Tue, 2 Aug 2022 21:41:23 +1000 Subject: [PATCH 079/151] Changing SQL Admin to Group --- .../vars/PreprocessEnvironment.ps1 | 1 + .../terraform_layer3/database.ps1 | 26 +++++++++++++++++-- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 3fb19681..64c5b6a9 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -57,6 +57,7 @@ foreach($t in ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "No { $Name = $t.Name $Value = $obj.ForEnvVar[0].$Name + if($Value.GetType().Name -eq "Boolean") { $Value = $Value.ToString().ToLower() diff --git a/solution/DeploymentV2/terraform_layer3/database.ps1 b/solution/DeploymentV2/terraform_layer3/database.ps1 index 5decd6e8..47327759 100644 --- a/solution/DeploymentV2/terraform_layer3/database.ps1 +++ b/solution/DeploymentV2/terraform_layer3/database.ps1 @@ -17,6 +17,25 @@ if($PublishSQLLogins -eq $false) { } 
else { Write-Host "Configuring SQL Server Users" + + #Add this deployment principal as SQL Server Admin -- Need to revert afterwards + + $currentsqladmin = (az sql server ad-admin list -g $env:TF_VAR_resource_group_name --server-name $tout.sqlserver_name | ConvertFrom-Json) + + $currentAccount = (az account show | ConvertFrom-Json) + az sql server ad-admin create -g $env:TF_VAR_resource_group_name --server-name $tout.sqlserver_name --object-id $currentAccount.id --display-name $currentAccount.name + + #OpenFirewall + $myIp = $env:TF_VAR_ip_address + $myIp2 = $env:TF_VAR_ip_address2 + + if ($myIp -ne $null) { + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp + } + if ($myIp2 -ne $null) { + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + } + $databases = @($tout.stagingdb_name, $tout.sampledb_name, $tout.metadatadb_name) $aadUsers = @($tout.datafactory_name,$tout.functionapp_name, $tout.webapp_name ) @@ -60,8 +79,8 @@ else { GO " - - write-host "Granting MSI Privileges on $database DB to $user" + + write-host ("Granting MSI Privileges on Database: " + $database + "to " + $user) Invoke-Sqlcmd -ServerInstance "$($tout.sqlserver_name).database.windows.net,1433" -Database $database -AccessToken $token -query $sqlcommand } } @@ -73,6 +92,9 @@ else { write-host "Granting DDL Role on $database DB to $($tout.datafactory_name)" Invoke-Sqlcmd -ServerInstance "$($tout.sqlserver_name).database.windows.net,1433" -Database $database -AccessToken $token -query $ddlCommand } + + #Replace Original SQL Admin + az sql server ad-admin create -g $env:TF_VAR_resource_group_name --server-name "ads-stg-sql-ads-hqve" --object-id $currentsqladmin.sid --display-name $currentsqladmin.login } From 5732913e6505576e66606b69df46f8a5e25bff4b Mon Sep 17 00:00:00 2001 From: 
"jorampon@microsoft.com" Date: Tue, 2 Aug 2022 21:41:43 +1000 Subject: [PATCH 080/151] Changing SQL Admin To Group --- .github/workflows/02.continuous-delivery.yml | 2 +- .../vars/common_vars_template.jsonnet | 38 +++++++++++++++ .../vars/staging/common_vars_values.jsonc | 16 +++---- .../pwshmodules/Deploy_0_Prep.psm1 | 4 +- .../pwshmodules/Deploy_7_MetadataDB.psm1 | 4 +- solution/DeploymentV2/terraform/synapse.tf | 2 +- .../DeploymentV2/terraform_layer2/synapse.tf | 2 +- .../terraform_layer3/03-publish.ps1 | 48 +++++++++++++++++++ .../DeploymentV2/terraform_layer3/outputs.tf | 13 +++++ 9 files changed, 113 insertions(+), 16 deletions(-) create mode 100644 solution/DeploymentV2/terraform_layer3/03-publish.ps1 diff --git a/.github/workflows/02.continuous-delivery.yml b/.github/workflows/02.continuous-delivery.yml index 30582b78..d4b96aab 100644 --- a/.github/workflows/02.continuous-delivery.yml +++ b/.github/workflows/02.continuous-delivery.yml @@ -78,7 +78,7 @@ jobs: working-directory: ./solution/DeploymentV2/terraform run: | az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 - az synapse workspace firewall-rule create --name AllowCICD --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.ARM_SYNAPSE_WORKSPACE_NAME }} + az synapse workspace firewall-rule create --name CICDAgent --resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.ARM_SYNAPSE_WORKSPACE_NAME }} az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} - name: Set PAL diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet 
b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 900abab3..9859e407 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -531,6 +531,44 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index d79e8adc..14c21fd9 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -6,9 +6,9 @@ "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", - "resource_group_name": "adf2", + "resource_group_name": "gft9", "ip_address": "144.138.148.220", - "ip_address2": "144.138.148.220", + "ip_address2": "163.47.54.2", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_owners": [ @@ -16,21 +16,19 @@ "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" ], "synapse_administrators": { - //"deploy_agent": "4c732d19-4076-4a76-87f3-6fbfd77f007d"//, "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "azure_sql_aad_administrators": { - "sql_aad_admin": "4c732d19-4076-4a76-87f3-6fbfd77f007d", - "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578"//, - //"AdsGoFastDeployerjkcgkaibkungm": "4c732d19-4076-4a76-87f3-6fbfd77f007d" + "sql_aad_admin": "6f467924-4d92-40e3-b348-b3154d5cd437", + "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "synapse_publishers": {}, "synapse_contributors": {}, "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "WEB_APP_ADMIN_SECURITY_GROUP": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadshqve", - 
"ARM_KEYVAULT_NAME": "ads-stg-kv-ads-hqve", - "ARM_DATALAKE_NAME": "adsstgdlsadshqveadsl", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadspmu3", + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-pmu3", + "ARM_DATALAKE_NAME": "adsstgdlsadspmu3adsl", "ARM_PAL_PARTNER_ID": "0", "GIT_REPOSITORY_NAME": "#####", "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "#####", diff --git a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 index 41209fa7..6aabb04b 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 @@ -78,7 +78,7 @@ function PrepareDeployment ( try { $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 - $hiddenoutput = az synapse workspace firewall-rule create --name AllowCICD_Agent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME + $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address } catch @@ -89,7 +89,7 @@ function PrepareDeployment ( try { $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 - $hiddenoutput = az synapse workspace firewall-rule create --name AllowCICD_User --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME + $hiddenoutput = az synapse 
workspace firewall-rule create --name CICDUser --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address2 } catch diff --git a/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 index 6082c1df..a871f9f8 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 @@ -31,10 +31,10 @@ function DeployMataDataDB ( $myIp2 = $env:TF_VAR_ip_address2 if ($myIp -ne $null) { - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "DeploymentAgent" --start-ip-address $myIp --end-ip-address $myIp + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp } if ($myIp2 -ne $null) { - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "DeploymentUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 } #Allow Azure services and resources to access this server $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "Azure" --start-ip-address 0.0.0.0 --end-ip-address 0.0.0.0 diff --git a/solution/DeploymentV2/terraform/synapse.tf b/solution/DeploymentV2/terraform/synapse.tf index b1446472..87c0759b 100644 --- a/solution/DeploymentV2/terraform/synapse.tf +++ b/solution/DeploymentV2/terraform/synapse.tf @@ -122,7 +122,7 @@ resource 
"azurerm_synapse_spark_pool" "synapse_spark_pool" { # -------------------------------------------------------------------------------------------------------------------- resource "azurerm_synapse_firewall_rule" "cicd" { count = var.deploy_adls && var.deploy_synapse ? 1 : 0 - name = "AllowGitHub" + name = "CICDAgent" synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id start_ip_address = var.ip_address end_ip_address = var.ip_address diff --git a/solution/DeploymentV2/terraform_layer2/synapse.tf b/solution/DeploymentV2/terraform_layer2/synapse.tf index 51e69dc2..6e56203a 100644 --- a/solution/DeploymentV2/terraform_layer2/synapse.tf +++ b/solution/DeploymentV2/terraform_layer2/synapse.tf @@ -122,7 +122,7 @@ resource "azurerm_synapse_spark_pool" "synapse_spark_pool" { # -------------------------------------------------------------------------------------------------------------------- resource "azurerm_synapse_firewall_rule" "cicd" { count = var.deploy_adls && var.deploy_synapse ? 1 : 0 - name = "AllowGitHub" + name = "CICDAgent" synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id start_ip_address = var.ip_address end_ip_address = var.ip_address diff --git a/solution/DeploymentV2/terraform_layer3/03-publish.ps1 b/solution/DeploymentV2/terraform_layer3/03-publish.ps1 new file mode 100644 index 00000000..d75e407c --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/03-publish.ps1 @@ -0,0 +1,48 @@ +#---------------------------------------------------------------------------------------------------------------- +# You must be logged into the Azure CLI to run this script +#---------------------------------------------------------------------------------------------------------------- +# This script will: +# - Deploy the required AAD objects (Application Registrations etc) +# +# This is intended for creating a once off deployment from your development machine. 
You should setup the +# GitHub actions for your long term prod/non-prod environments +# +# Intructions +# - Ensure that you have run the Prepare.ps1 script first. This will prepare your azure subscription for deployment +# - Ensure that you have run az login and az account set +# - Ensure you have Contributor Access to the subscription you are deploying to. +# - Ensure you have Application.ReadWrite.OwnedBy on the Azure AD. +# - Run this script +# +# You can run this script multiple times if needed. +# +#---------------------------------------------------------------------------------------------------------------- +param ( + [Parameter(Mandatory=$false)] + [string]$FeatureTemplate="basic_deployment" +) + +#------------------------------------------------------------------------------------------------------------ +# Module Imports #Mandatory +#------------------------------------------------------------------------------------------------------------ +import-Module ./../pwshmodules/GatherOutputsFromTerraform.psm1 -force +import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force +#------------------------------------------------------------------------------------------------------------ +# Preparation #Mandatory +#------------------------------------------------------------------------------------------------------------ +$PathToReturnTo = (Get-Location).Path +$deploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../') + +$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') +$ipaddress = $env:TF_VAR_ip_address +$ipaddress2 = $env:TF_VAR_ip_address2 + +PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo + 
+#------------------------------------------------------------------------------------------------------------ +# Get Outputs #Mandatory +#------------------------------------------------------------------------------------------------------------ +$tout = GatherOutputsFromTerraform -TerraformFolderPath $PathToReturnTo + +./database.ps1 -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo -PublishSQLLogins $true \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer3/outputs.tf b/solution/DeploymentV2/terraform_layer3/outputs.tf index 723810cf..81e73ca4 100644 --- a/solution/DeploymentV2/terraform_layer3/outputs.tf +++ b/solution/DeploymentV2/terraform_layer3/outputs.tf @@ -6,6 +6,19 @@ output "resource_group_name" { value = var.resource_group_name } +output "sqlserver_name" { + value = data.terraform_remote_state.layer2.outputs.sqlserver_name +} +output "stagingdb_name" { + value = data.terraform_remote_state.layer2.outputs.stagingdb_name +} +output "sampledb_name" { + value = data.terraform_remote_state.layer2.outputs.sampledb_name +} +output "metadatadb_name" { + value = data.terraform_remote_state.layer2.outputs.metadatadb_name +} + /*Variables for Naming Module*/ output "naming_unique_seed" { value = data.terraform_remote_state.layer2.outputs.naming_unique_seed From cc5e78171b63dba89331ed7a823485fe8792ca93 Mon Sep 17 00:00:00 2001 From: "jorampon@microsoft.com" Date: Wed, 3 Aug 2022 07:04:02 +1000 Subject: [PATCH 081/151] saved gh secret command --- solution/DeploymentV2/environments/Sample.env | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/solution/DeploymentV2/environments/Sample.env b/solution/DeploymentV2/environments/Sample.env index dc17a9bc..4d9bbbf4 100644 --- a/solution/DeploymentV2/environments/Sample.env +++ b/solution/DeploymentV2/environments/Sample.env @@ -20,4 +20,6 @@ GIT_USER_NAME="Usernames of git user used for publishing artefacts. 
Only used i GIT_EMAIL_ADDRESS="Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file." GIT_SYNAPSE_REPOSITORY_BRANCH_NAME="Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file." GIT_ADF_REPOSITORY_BRANCH_NAME="Git Branch Name associated with the branch that will be used to publish ADF artefacts. Only used if adf_git_toggle_integration is true in the hcl file." -ARM_PAL_PARTNER_ID="ID of Implementation Partner for PAL purposes" \ No newline at end of file +ARM_PAL_PARTNER_ID="ID of Implementation Partner for PAL purposes" + +# gh secret set -f Sample.env -e Development \ No newline at end of file From 592b58d7775d4879dcd6d86a10f328f8f79c2c27 Mon Sep 17 00:00:00 2001 From: "jorampon@microsoft.com" Date: Wed, 3 Aug 2022 11:50:20 +1000 Subject: [PATCH 082/151] Added uat --- ...yml => 02.continuous-delivery-staging.yml} | 0 .../workflows/02.continuous-delivery-uat.yml | 180 ++++++++++++++++++ solution/DeploymentV2/Prepare.ps1 | 9 +- .../environments/vars/common_vars_schema.json | 2 +- .../vars/common_vars_template.jsonnet | 41 ++++ .../vars/staging/common_vars_values.jsonc | 3 +- .../vars/uat/common_vars_values.jsonc | 42 ++++ .../terraform_layer1/vars/uat/terragrunt.hcl | 38 ++++ .../terraform_layer2/vars/uat/terragrunt.hcl | 78 ++++++++ .../terraform_layer3/database.ps1 | 37 +++- .../DeploymentV2/terraform_layer3/layer2.tf | 4 +- .../DeploymentV2/terraform_layer3/outputs.tf | 10 + .../terraform_layer3/vars/uat/terragrunt.hcl | 56 ++++++ 13 files changed, 486 insertions(+), 14 deletions(-) rename .github/workflows/{02.continuous-delivery.yml => 02.continuous-delivery-staging.yml} (100%) create mode 100644 .github/workflows/02.continuous-delivery-uat.yml create mode 100644 solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc create mode 100644 
solution/DeploymentV2/terraform_layer1/vars/uat/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer2/vars/uat/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer3/vars/uat/terragrunt.hcl diff --git a/.github/workflows/02.continuous-delivery.yml b/.github/workflows/02.continuous-delivery-staging.yml similarity index 100% rename from .github/workflows/02.continuous-delivery.yml rename to .github/workflows/02.continuous-delivery-staging.yml diff --git a/.github/workflows/02.continuous-delivery-uat.yml b/.github/workflows/02.continuous-delivery-uat.yml new file mode 100644 index 00000000..be6a73f8 --- /dev/null +++ b/.github/workflows/02.continuous-delivery-uat.yml @@ -0,0 +1,180 @@ +name: Continuous Delivery + +on: + workflow_dispatch: + push: + branches: feature-1.0.4 + +jobs: + deploy-to-env-one: + name: Deploy to Environment One + concurrency: terraform + environment: + name: development + env: + # This determines the location of the .hcl file that will be used + environmentName: uat + gitDeploy : true + skipTerraformDeployment: false + featureTemplate: ${{ secrets.ARM_FEATURE_TEMPLATE }} + WEB_APP_ADMIN_USER: ${{ secrets.WEB_APP_ADMIN_USER }} + keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} + ARM_SYNAPSE_WORKSPACE_NAME: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} + datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} + # Required for Terraform + ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} + ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} + # Customizing Terraform vars + TF_VAR_ip_address2 : ${{ secrets.ARM_IP_ADDRESS2 }} + TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} + TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} + TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} + TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} + TF_VAR_resource_group_name : ${{ 
secrets.ARM_RESOURCE_GROUP_NAME }} + TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} + TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} + TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} + TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} + # GIT Integration set up + TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} + TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} + TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + + #TF_LOG : TRACE + + runs-on: ubuntu-latest + steps: + - name: PrintInfo + run: | + echo "Deploying to Resource Group: ${{ env.TF_VAR_resource_group_name }} " + echo "echo Hcl file name: ${{ env.environmentName}} " + + - name: Checkout + uses: actions/checkout@v3.0.0 + + - name: Get public IP + id: ip + uses: haythem/public-ip@v1.2 + + - name: Login via Az module + uses: azure/login@v1 + with: + creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' + enable-AzPSSession: true + + - name: Open Firewalls for Agent + id: open_firewalls + continue-on-error: true + working-directory: ./solution/DeploymentV2/terraform + run: | + az keyvault network-rule add -g ${{ env.TF_VAR_resource_group_name }} --name ${{ env.keyVaultName }} --ip-address ${{ steps.ip.outputs.ipv4 }}/32 + az synapse workspace firewall-rule create --name CICDAgent 
--resource-group ${{ env.TF_VAR_resource_group_name }} --start-ip-address ${{ steps.ip.outputs.ipv4 }} --end-ip-address ${{ steps.ip.outputs.ipv4 }} --workspace-name ${{ env.ARM_SYNAPSE_WORKSPACE_NAME }} + az storage account network-rule add --resource-group ${{ env.TF_VAR_resource_group_name }} --account-name ${{ env.datalakeName }} --ip-address ${{ steps.ip.outputs.ipv4 }} + + - name: Set PAL + id: set_pal + continue-on-error: true + run: | + az extension add --name managementpartner + az managementpartner update --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} || az managementpartner create --partner-id ${{ secrets.ARM_PAL_PARTNER_ID }} + + - name: Terragrunt Install + id: terragrunt_install + working-directory: ./solution/DeploymentV2/terraform_layer2 + run: | + brew install terragrunt + + - name: Install Jsonnet + id: jsonnet-install + working-directory: ./solution/DeploymentV2/ + env: + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + run: | + wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb + sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb + + - name: Deploy Solution IAC + id: solution-deployment-iac + working-directory: ./solution/DeploymentV2/terraform_layer2 + shell: pwsh + env: + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + run: | + git update-index --chmod=+x ./02-deploy.ps1 + ./02-deploy.ps1 + + - name: Build and Publish Code Artefacts + id: solution-deployment-code + working-directory: ./solution/DeploymentV2/terraform_layer2 + shell: pwsh + env: + TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} + run: | + git update-index --chmod=+x ./02-publish.ps1 + ./02-publish.ps1 + + #PROD ENVIRONMENT + deploy-to-env-two: + name: Deploy to Environment Two + concurrency: terraform + needs: [deploy-to-env-one] + environment: + name: Prod + env: + environmentName: production + gitDeploy : true + skipTerraformDeployment: false + WEB_APP_ADMIN_USER: ${{ secrets.WEB_APP_ADMIN_USER }} + keyVaultName: ${{ 
secrets.ARM_KEYVAULT_NAME }} + ARM_SYNAPSE_WORKSPACE_NAME: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} + datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} + # Required for Terraform + ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} + ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} + # Customizing Terraform vars + TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} + TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} + TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} + TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} + TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} + TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} + TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} + TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} + TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} + # GIT Integration set up + TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} + TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} + TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + + #PROD ENVIRONMENT + #TF_LOG : TRACE + + runs-on: ubuntu-latest + steps: + + - name: Checkout + uses: actions/checkout@v3.0.0 + + - name: Get public IP + id: ip + uses: haythem/public-ip@v1.2 diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 
index c81fbde8..66387a80 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -224,8 +224,8 @@ else $common_vars_values.subscription_id = $env:TF_VAR_subscription_id $common_vars_values.ip_address2 = $env:TF_VAR_ip_address $common_vars_values.tenant_id = $env:TF_VAR_tenant_id - $common_vars_values.WEB_APP_ADMIN_USER = (az ad signed-in-user show --query id -o tsv) - $common_vars_values.deployment_principal_layers1and3 = $common_vars_values.WEB_APP_ADMIN_USER + $common_vars_values.WEB_APP_ADMIN_USER = (az ad signed-in-user show | ConvertFrom-Json).id + $common_vars_values.deployment_principal_layers1and3 = (az ad signed-in-user show | ConvertFrom-Json).id $foundUser = $false foreach($u in $common_vars_values.synapse_administrators) @@ -237,8 +237,9 @@ else } } if($foundUser -eq $true) - { - $common_vars_values.synapse_administrators.Deploy_User = $common_vars_values.WEB_APP_ADMIN_USER + { + $userPrincipalName = (az ad signed-in-user show | ConvertFrom-Json).userPrincipalName + $common_vars_values.synapse_administrators.$userPrincipalName = (az ad signed-in-user show | ConvertFrom-Json).id } $common_vars_values | Convertto-Json -Depth 10 | Set-Content ./environments/vars/$environmentName/common_vars_values.jsonc diff --git a/solution/DeploymentV2/environments/vars/common_vars_schema.json b/solution/DeploymentV2/environments/vars/common_vars_schema.json index 068996d6..f17066f5 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_schema.json +++ b/solution/DeploymentV2/environments/vars/common_vars_schema.json @@ -103,7 +103,7 @@ "type": "object", "required": [], "default": {"sql_aad_admin": ""}, - "description": "Object with a property for each user that is to be a Azure SQL Administrator. The property name should be the username of the user in AAD and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. 
You MUST provide the sql_aad_admin property.", + "description": "Object with a property for each user that is to be a Azure SQL Administrator. This should be a user or AAD Group. The property name should be the username of the user in AAD and should be unique. The property value is the object_id of the user. You MUST provide the sql_aad_admin property which will determine the AAD Admin setting for the Azure SQL Server. ", "properties": { "sql_aad_admin": { "type": "string" diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 9859e407..9e518ac7 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -5,6 +5,7 @@ local locals = { 'local' : import './local/common_vars_values.jsonc', 'production' : import './production/common_vars_values.jsonc', 'staging' : import './staging/common_vars_values.jsonc', + 'uat' : import './uat/common_vars_values.jsonc', /*DONOTREMOVETHISCOMMENT:ENVS*/ }; @@ -569,6 +570,46 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 14c21fd9..25d0d1c1 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -8,7 +8,7 @@ "prefix": "ads", "resource_group_name": "gft9", "ip_address": "144.138.148.220", - "ip_address2": "163.47.54.2", + "ip_address2": "167.220.242.194", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_owners": [ @@ -20,6 +20,7 @@ }, "azure_sql_aad_administrators": { "sql_aad_admin": "6f467924-4d92-40e3-b348-b3154d5cd437", + "AdsGoFastDeployerjkcgkaibkungm":"4c732d19-4076-4a76-87f3-6fbfd77f007d", "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "synapse_publishers": {}, diff --git a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc new file mode 100644 index 00000000..95bb9faf --- /dev/null +++ b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc @@ -0,0 +1,42 @@ +{ + "$schema": "./../common_vars_schema.json", + "owner_tag": "Contoso", + "resource_location": "australiaeast", + "environment_tag": "stg", + "domain": "microsoft.com", + "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", + "prefix": "ads", + "resource_group_name": "gft8", + "ip_address": "144.138.148.220", + "ip_address2": "167.220.242.66", + "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", + "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "resource_owners": [ + "4c732d19-4076-4a76-87f3-6fbfd77f007d", + "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + ], + "synapse_administrators": { + "jorampon@microsoft.com": 
"ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + }, + "azure_sql_aad_administrators": { + "sql_aad_admin": "6f467924-4d92-40e3-b348-b3154d5cd437", + "AdsGoFastDeployerjkcgkaibkungm": "4c732d19-4076-4a76-87f3-6fbfd77f007d", + "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + }, + "synapse_publishers": {}, + "synapse_contributors": {}, + "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "WEB_APP_ADMIN_SECURITY_GROUP": "#####", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsbgrh", + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-bgrh", + "ARM_DATALAKE_NAME": "adsstgdlsadsbgrhadsl", + "ARM_PAL_PARTNER_ID": "0", + "GIT_REPOSITORY_NAME": "#####", + "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "#####", + "GIT_USER_NAME": "#####", + "GIT_EMAIL_ADDRESS": "#####", + "GIT_ADF_REPOSITORY_NAME": "#####", + "GIT_ADF_REPOSITORY_BRANCH_NAME": "#####", + "GIT_ADF_USER_NAME": "#####", + "GIT_ADF_EMAIL_ADDRESS": "#####" +} diff --git a/solution/DeploymentV2/terraform_layer1/vars/uat/terragrunt.hcl b/solution/DeploymentV2/terraform_layer1/vars/uat/terragrunt.hcl new file mode 100644 index 00000000..c31076a1 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer1/vars/uat/terragrunt.hcl @@ -0,0 +1,38 @@ +locals { + common_vars = jsondecode(file("../../../bin/environments/uat/common_vars_for_hcl.json")) +} + + +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage account here. + # You should have created these with the Prepare.ps1 script. 
+ resource_group_name = "${local.common_vars.resource_group_name}" + storage_account_name = "${local.common_vars.resource_group_name}state" + container_name = "tstate" + key = "terraform_layer1.tfstate" + } +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "${local.common_vars.prefix}" # All azure resources will be prefixed with this + domain = "${local.common_vars.domain}" # Used when configuring AAD config for Azure functions + tenant_id = "${local.common_vars.tenant_id}" # This is the Azure AD tenant ID + subscription_id = "${local.common_vars.subscription_id}" # The azure subscription id to deploy to + resource_location = "${local.common_vars.resource_location}" # The location of the resources + resource_group_name = "${local.common_vars.resource_group_name}" # The resource group all resources will be deployed to + owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources + environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names + ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. 
+ resource_owners = "${local.common_vars.resource_owners}" + deploy_web_app = true + deploy_function_app = true +} diff --git a/solution/DeploymentV2/terraform_layer2/vars/uat/terragrunt.hcl b/solution/DeploymentV2/terraform_layer2/vars/uat/terragrunt.hcl new file mode 100644 index 00000000..23562dbe --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/vars/uat/terragrunt.hcl @@ -0,0 +1,78 @@ +locals { + common_vars = jsondecode(file("../../../bin/environments/uat/common_vars_for_hcl.json")) +} + + +generate "layer1.tf" { + path = "layer1.tf" + if_exists = "overwrite_terragrunt" + contents = < Date: Thu, 4 Aug 2022 07:48:20 +1000 Subject: [PATCH 083/151] Changing uat to full_deployment --- .../environments/vars/common_vars_template.jsonnet | 4 ++++ .../environments/vars/uat/common_vars_values.jsonc | 2 +- solution/DeploymentV2/terraform_layer3/layer2.tf | 4 ++-- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 9e518ac7..fb873ef7 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -625,6 +625,10 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + diff --git a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc index 95bb9faf..210d6009 100644 --- a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc @@ -8,7 +8,7 @@ "prefix": "ads", "resource_group_name": "gft8", "ip_address": "144.138.148.220", - "ip_address2": "167.220.242.66", + "ip_address2": "163.47.54.2", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_owners": [ diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index f4bc9516..953008c3 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer2.tfstate" - resource_group_name = "gft9" - storage_account_name = "gft9state" + resource_group_name = "gft8" + storage_account_name = "gft8state" } } From 237ab98982cf71723083e83365d74196589a7b2f Mon Sep 17 00:00:00 2001 From: "jorampon@microsoft.com" Date: Thu, 4 Aug 2022 14:40:18 +1000 Subject: [PATCH 084/151] Testing CICD --- .github/workflows/02.continuous-delivery-staging.yml | 2 +- .github/workflows/02.continuous-delivery-uat.yml | 2 +- .../DeploymentV2/environments/vars/common_vars_template.jsonnet | 2 ++ .../DeploymentV2/environments/vars/uat/common_vars_values.jsonc | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index d4b96aab..642efaa2 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -1,4 +1,4 @@ -name: Continuous Delivery +name: Continuous Delivery - Staging on: workflow_dispatch: 
diff --git a/.github/workflows/02.continuous-delivery-uat.yml b/.github/workflows/02.continuous-delivery-uat.yml index be6a73f8..873c1557 100644 --- a/.github/workflows/02.continuous-delivery-uat.yml +++ b/.github/workflows/02.continuous-delivery-uat.yml @@ -1,4 +1,4 @@ -name: Continuous Delivery +name: Continuous Delivery - Uat on: workflow_dispatch: diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index fb873ef7..7a4f4bf4 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -631,6 +631,8 @@ local SecretFileSensitiveVars = { // Object comprehension. + + diff --git a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc index 210d6009..c3d12cef 100644 --- a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc @@ -8,7 +8,7 @@ "prefix": "ads", "resource_group_name": "gft8", "ip_address": "144.138.148.220", - "ip_address2": "163.47.54.2", + "ip_address2": "167.220.242.131", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_owners": [ From e2cd2652ee923b2df2a9c1d30d13560036205e3d Mon Sep 17 00:00:00 2001 From: "jorampon@microsoft.com" Date: Thu, 4 Aug 2022 15:02:35 +1000 Subject: [PATCH 085/151] Updated UAT --- .github/workflows/02.continuous-delivery-uat.yml | 2 +- solution/DeploymentV2/Prepare.ps1 | 2 +- solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 | 2 ++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/02.continuous-delivery-uat.yml b/.github/workflows/02.continuous-delivery-uat.yml index 873c1557..e808e23a 100644 --- 
a/.github/workflows/02.continuous-delivery-uat.yml +++ b/.github/workflows/02.continuous-delivery-uat.yml @@ -10,7 +10,7 @@ jobs: name: Deploy to Environment One concurrency: terraform environment: - name: development + name: uat env: # This determines the location of the .hcl file that will be used environmentName: uat diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index 66387a80..b0c5ffce 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -72,7 +72,7 @@ if ($gitDeploy) az provider register --namespace $provider } - az storage account create --resource-group $resourceGroupName --name $stateStorageName --sku Standard_LRS --allow-blob-public-access false --https-only true --min-tls-version TLS1_2 + az storage account create --resource-group $resourceGroupName --name $stateStorageName --sku Standard_LRS --allow-blob-public-access false --https-only true --min-tls-version TLS1_2 --public-network-access Disabled az storage container create --name tstate --account-name $stateStorageName --auth-mode login } else diff --git a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 index 6aabb04b..6ee32a2b 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 @@ -77,6 +77,7 @@ function PrepareDeployment ( try { + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name 
$env:ARM_SYNAPSE_WORKSPACE_NAME $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address @@ -88,6 +89,7 @@ function PrepareDeployment ( try { + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 $hiddenoutput = az synapse workspace firewall-rule create --name CICDUser --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address2 From 7c8f844b5333bfaeba4b093da3f6fce9e8711eea Mon Sep 17 00:00:00 2001 From: "jorampon@microsoft.com" Date: Thu, 4 Aug 2022 15:22:46 +1000 Subject: [PATCH 086/151] Added FeatureTemplate Param to Deploy and Publish --- .github/workflows/02.continuous-delivery-staging.yml | 4 ++-- .github/workflows/02.continuous-delivery-uat.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index 642efaa2..33b482ea 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -111,7 +111,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./02-deploy.ps1 - ./02-deploy.ps1 + ./02-deploy.ps1 -FeatureTemplate $featureTemplate - name: Build and Publish Code Artefacts id: solution-deployment-code @@ -121,7 +121,7 @@ jobs: TF_VAR_ip_address : 
${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./02-publish.ps1 - ./02-publish.ps1 + ./02-publish.ps1 -FeatureTemplate $featureTemplate #PROD ENVIRONMENT deploy-to-env-two: diff --git a/.github/workflows/02.continuous-delivery-uat.yml b/.github/workflows/02.continuous-delivery-uat.yml index e808e23a..3e9df3e4 100644 --- a/.github/workflows/02.continuous-delivery-uat.yml +++ b/.github/workflows/02.continuous-delivery-uat.yml @@ -111,7 +111,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./02-deploy.ps1 - ./02-deploy.ps1 + ./02-deploy.ps1 -FeatureTemplate $featureTemplate - name: Build and Publish Code Artefacts id: solution-deployment-code @@ -121,7 +121,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./02-publish.ps1 - ./02-publish.ps1 + ./02-publish.ps1 -FeatureTemplate $featureTemplate #PROD ENVIRONMENT deploy-to-env-two: From 5cd15d50dab882773abe1bdb7c4d49d3141c57df Mon Sep 17 00:00:00 2001 From: "jorampon@microsoft.com" Date: Thu, 4 Aug 2022 15:31:39 +1000 Subject: [PATCH 087/151] Fixed FeatureTemplate bug --- .github/workflows/02.continuous-delivery-staging.yml | 4 ++-- .github/workflows/02.continuous-delivery-uat.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index 33b482ea..9abbd83b 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -111,7 +111,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./02-deploy.ps1 - ./02-deploy.ps1 -FeatureTemplate $featureTemplate + ./02-deploy.ps1 -FeatureTemplate $env:featureTemplate - name: Build and Publish Code Artefacts id: solution-deployment-code @@ -121,7 +121,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x 
./02-publish.ps1 - ./02-publish.ps1 -FeatureTemplate $featureTemplate + ./02-publish.ps1 -FeatureTemplate $env:featureTemplate #PROD ENVIRONMENT deploy-to-env-two: diff --git a/.github/workflows/02.continuous-delivery-uat.yml b/.github/workflows/02.continuous-delivery-uat.yml index 3e9df3e4..7fbfbe66 100644 --- a/.github/workflows/02.continuous-delivery-uat.yml +++ b/.github/workflows/02.continuous-delivery-uat.yml @@ -111,7 +111,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./02-deploy.ps1 - ./02-deploy.ps1 -FeatureTemplate $featureTemplate + ./02-deploy.ps1 -FeatureTemplate $env:featureTemplate - name: Build and Publish Code Artefacts id: solution-deployment-code @@ -121,7 +121,7 @@ jobs: TF_VAR_ip_address : ${{steps.ip.outputs.ipv4}} run: | git update-index --chmod=+x ./02-publish.ps1 - ./02-publish.ps1 -FeatureTemplate $featureTemplate + ./02-publish.ps1 -FeatureTemplate $env:featureTemplate #PROD ENVIRONMENT deploy-to-env-two: From 4e633a7cc97ee20b78feb50120c93b29fc914fcd Mon Sep 17 00:00:00 2001 From: "jorampon@microsoft.com" Date: Thu, 4 Aug 2022 20:42:32 +1000 Subject: [PATCH 088/151] Added Resource Group Ownership assignment to prepare --- .github/workflows/02.continuous-delivery-staging.yml | 2 +- solution/DeploymentV2/Prepare.ps1 | 4 ++++ .../environments/vars/common_vars_template.jsonnet | 4 ++++ .../environments/vars/staging/common_vars_values.jsonc | 6 +++--- .../environments/vars/uat/common_vars_values.jsonc | 4 ++-- 5 files changed, 14 insertions(+), 6 deletions(-) diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index 9abbd83b..84cbd2be 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -3,7 +3,7 @@ name: Continuous Delivery - Staging on: workflow_dispatch: push: - branches: feature-1.0.4 + branches: main #feature-1.0.4 jobs: deploy-to-env-one: diff --git 
a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index b0c5ffce..cd4207c9 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -157,6 +157,10 @@ else } + + $assigneeobject = Read-Host "Enter the object id of the AAD account that you would like to have ownership of the new resource group" + + az role assignment create --role "Owner" --scope "/subscriptions/${env:TF_VAR_subscription_id}/resourcegroups/${env:TF_VAR_resource_group_name}" --assignee-object-id $assigneeobject #------------------------------------------------------------------------------------------------------------ # Print pretty output for user #------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 7a4f4bf4..efa8fd3a 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -631,6 +631,10 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 25d0d1c1..6c2970ba 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -6,9 +6,9 @@ "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", - "resource_group_name": "gft9", + "resource_group_name": "gfuat", "ip_address": "144.138.148.220", - "ip_address2": "167.220.242.194", + "ip_address2": "163.47.54.2", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_owners": [ @@ -20,7 +20,7 @@ }, "azure_sql_aad_administrators": { "sql_aad_admin": "6f467924-4d92-40e3-b348-b3154d5cd437", - "AdsGoFastDeployerjkcgkaibkungm":"4c732d19-4076-4a76-87f3-6fbfd77f007d", + "AdsGoFastDeployerjkcgkaibkungm": "4c732d19-4076-4a76-87f3-6fbfd77f007d", "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "synapse_publishers": {}, diff --git a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc index c3d12cef..c2486731 100644 --- a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc @@ -6,9 +6,9 @@ "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", - "resource_group_name": "gft8", + "resource_group_name": "gfuat", "ip_address": "144.138.148.220", - "ip_address2": "167.220.242.131", + "ip_address2": "163.47.54.2", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "resource_owners": [ From c3ae054926ba1c63dfb73051fe5b1865e928b895 Mon Sep 17 00:00:00 2001 
From: "jorampon@microsoft.com" Date: Thu, 4 Aug 2022 21:29:04 +1000 Subject: [PATCH 089/151] NewUATDeployment --- .../environments/vars/common_vars_template.jsonnet | 4 ++++ .../environments/vars/uat/common_vars_values.jsonc | 6 +++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index efa8fd3a..942ef933 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -635,6 +635,10 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + diff --git a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc index c2486731..1a6c9794 100644 --- a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc @@ -27,9 +27,9 @@ "synapse_contributors": {}, "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "WEB_APP_ADMIN_SECURITY_GROUP": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsbgrh", - "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-bgrh", - "ARM_DATALAKE_NAME": "adsstgdlsadsbgrhadsl", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsnvmz", + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-nvmz", + "ARM_DATALAKE_NAME": "adsstgdlsadsnvmzadsl", "ARM_PAL_PARTNER_ID": "0", "GIT_REPOSITORY_NAME": "#####", "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "#####", From 42ea558c901ea358de36348214bd4fda1871139f Mon Sep 17 00:00:00 2001 From: "jorampon@microsoft.com" Date: Fri, 5 Aug 2022 08:57:24 +1000 Subject: [PATCH 090/151] Updated Layer 1 & 2 ownership --- .../DeploymentV2/environments/vars/common_vars_schema.json | 4 +++- .../environments/vars/common_vars_template.jsonnet | 6 ++++++ .../environments/vars/uat/common_vars_values.jsonc | 2 +- 
solution/DeploymentV2/terraform_layer3/03-publish.ps1 | 3 ++- solution/DeploymentV2/terraform_layer3/layer2.tf | 4 ++-- 5 files changed, 14 insertions(+), 5 deletions(-) diff --git a/solution/DeploymentV2/environments/vars/common_vars_schema.json b/solution/DeploymentV2/environments/vars/common_vars_schema.json index f17066f5..e51ba379 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_schema.json +++ b/solution/DeploymentV2/environments/vars/common_vars_schema.json @@ -79,7 +79,9 @@ "type": "string" }, "deployment_principal_layers1and3": { - "type": "string" + "description": "Object Id of the AAD account that will manage layer's 1 & 3. Note leave this blank if you are going to also include this principal in the resource owner's collection.", + "type": "string", + "default": "" }, "resource_owners": { "type": "array", diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 942ef933..e511724c 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -637,6 +637,12 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + + + diff --git a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc index 1a6c9794..5b0bc1b6 100644 --- a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc @@ -10,7 +10,7 @@ "ip_address": "144.138.148.220", "ip_address2": "163.47.54.2", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", - "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "deployment_principal_layers1and3": "", "resource_owners": [ "4c732d19-4076-4a76-87f3-6fbfd77f007d", "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" diff --git a/solution/DeploymentV2/terraform_layer3/03-publish.ps1 b/solution/DeploymentV2/terraform_layer3/03-publish.ps1 index d75e407c..d0da0bf9 100644 --- a/solution/DeploymentV2/terraform_layer3/03-publish.ps1 +++ b/solution/DeploymentV2/terraform_layer3/03-publish.ps1 @@ -45,4 +45,5 @@ PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderP #------------------------------------------------------------------------------------------------------------ $tout = GatherOutputsFromTerraform -TerraformFolderPath $PathToReturnTo -./database.ps1 -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo -PublishSQLLogins $true \ No newline at end of file +./database.ps1 -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo -PublishSQLLogins $true +./app_service.ps1 -aad_webreg_id $tout.aad_webreg_id \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index 953008c3..5aacfbae 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer2.tfstate" - resource_group_name = "gft8" - 
storage_account_name = "gft8state" + resource_group_name = "gfuat" + storage_account_name = "gfuatstate" } } From 3c4a640360acab8092025c14095510e68e9d2ae0 Mon Sep 17 00:00:00 2001 From: "jorampon@microsoft.com" Date: Sat, 6 Aug 2022 09:36:53 +1000 Subject: [PATCH 091/151] working on purview endpoint permissions issue --- .../workflows/02.continuous-delivery-uat.yml | 2 +- cloc.md | 28 ++ .../DeploymentV2/PrivateEndPointApprover.json | 14 + solution/DeploymentV2/Test.ps1 | 17 +- .../vars/common_vars_template.jsonnet | 4 + .../vars/uat/common_vars_values.jsonc | 2 +- .../DeploymentV2/terraform_layer1/outputs.tf | 2 +- .../DeploymentV2/terraform_layer2/layer1.tf | 4 +- .../DeploymentV2/terraform_layer2/outputs.tf | 4 +- .../DeploymentV2/terraform_layer2/purview.tf | 80 ---- .../{ => legacy}/app_service.tf.bak | 0 .../{ => legacy}/database.tf.bak | 0 .../{ => legacy}/redo_sql_users.ps1 | 0 .../DeploymentV2/terraform_layer3/purview.tf | 83 +++- .../arm/privatelinks.json | 355 ++++++++++++++++++ .../main.tf | 44 +++ .../outputs.tf | 0 .../vars.tf | 62 +++ .../terraform_layer3/vars/uat/terragrunt.hcl | 2 +- 19 files changed, 600 insertions(+), 103 deletions(-) create mode 100644 cloc.md create mode 100644 solution/DeploymentV2/PrivateEndPointApprover.json rename solution/DeploymentV2/terraform_layer3/{ => legacy}/app_service.tf.bak (100%) rename solution/DeploymentV2/terraform_layer3/{ => legacy}/database.tf.bak (100%) rename solution/DeploymentV2/terraform_layer3/{ => legacy}/redo_sql_users.ps1 (100%) create mode 100644 solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/arm/privatelinks.json create mode 100644 solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf create mode 100644 solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/outputs.tf create mode 100644 solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/vars.tf diff --git 
a/.github/workflows/02.continuous-delivery-uat.yml b/.github/workflows/02.continuous-delivery-uat.yml index 7fbfbe66..08482b0a 100644 --- a/.github/workflows/02.continuous-delivery-uat.yml +++ b/.github/workflows/02.continuous-delivery-uat.yml @@ -3,7 +3,7 @@ name: Continuous Delivery - Uat on: workflow_dispatch: push: - branches: feature-1.0.4 + branches: main #feature-1.0.4 jobs: deploy-to-env-one: diff --git a/cloc.md b/cloc.md new file mode 100644 index 00000000..dea003cd --- /dev/null +++ b/cloc.md @@ -0,0 +1,28 @@ +cloc|github.com/AlDanial/cloc v 1.82 T=12.07 s (299.1 files/s, 91359.7 lines/s) +--- | --- + +Language|files|blank|comment|code +:-------|-------:|-------:|-------:|-------: +JSON|772|58|0|541329 +YAML|22|4145|4217|186336 +JavaScript|1455|22426|21559|118459 +SQL|188|2090|2098|59893 +CSS|47|7877|447|33617 +C#|248|3934|1517|19616 +Razor|322|1757|268|18142 +SVG|197|0|42|15098 +HCL|101|1318|581|13006 +Sass|90|1333|1138|5545 +PowerShell|74|897|728|3761 +Jupyter Notebook|15|0|1937|3755 +Markdown|60|352|0|1546 +MSBuild script|8|60|2|634 +Bourne Shell|3|62|72|423 +Go|1|8|5|73 +Python|2|14|64|55 +Dockerfile|1|6|9|34 +make|1|6|0|22 +DOS Batch|1|4|3|1 +HTML|1|1|0|0 +--------|--------|--------|--------|-------- +SUM:|3609|46348|34687|1021345 diff --git a/solution/DeploymentV2/PrivateEndPointApprover.json b/solution/DeploymentV2/PrivateEndPointApprover.json new file mode 100644 index 00000000..da1ac44f --- /dev/null +++ b/solution/DeploymentV2/PrivateEndPointApprover.json @@ -0,0 +1,14 @@ +{ + "Name": "Private Endpoint Approver", + "IsCustom": true, + "Description": "Approve Private Endpoints", + "Actions": [ + "PrivateEndpointConnectionsApproval/action" + ], + "NotActions": [], + "DataActions": [], + "NotDataActions": [], + "AssignableScopes": [ + "/" + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/Test.ps1 b/solution/DeploymentV2/Test.ps1 index ae098abe..d2efdb6a 100644 --- a/solution/DeploymentV2/Test.ps1 +++ 
b/solution/DeploymentV2/Test.ps1 @@ -1,17 +1,4 @@ -#Next Add MSIs Permissions -#Function App MSI Access to App Role to allow chained function calls -write-host "Granting Function App MSI Access to App Role to allow chained function calls" -$authapp = az ad app show --id "api://$env:AdsOpts_CD_ServicePrincipals_FunctionAppAuthenticationSP_Name" | ConvertFrom-Json -$callingappid = ((az functionapp identity show --name $env:AdsOpts_CD_Services_CoreFunctionApp_Name --resource-group $env:AdsOpts_CD_ResourceGroup_Name) | ConvertFrom-Json).principalId -$authappid = $authapp.appId -$permissionid = $authapp.oauth2Permissions.id + -$authappobjectid = (az ad sp show --id $authappid | ConvertFrom-Json).objectId -$body = '{"principalId": "@principalid","resourceId":"@resourceId","appRoleId": "@appRoleId"}' | ConvertFrom-Json -$body.resourceId = $authappobjectid -$body.appRoleId = ($authapp.appRoles | Where-Object {$_.value -eq "FunctionAPICaller" }).id -$body.principalId = $callingappid -$body = ($body | ConvertTo-Json -compress | Out-String).Replace('"','\"') - -$result = az rest --method post --uri "https://graph.microsoft.com/v1.0/servicePrincipals/$authappobjectid/appRoleAssignedTo" --headers '{\"Content-Type\":\"application/json\"}' --body $body +az role definition create --role-definition "./PrivateEndPointApprover.json" \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index e511724c..aaab0094 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -645,6 +645,10 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + diff --git a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc index 5b0bc1b6..1be1b5b4 100644 --- a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc @@ -8,7 +8,7 @@ "prefix": "ads", "resource_group_name": "gfuat", "ip_address": "144.138.148.220", - "ip_address2": "163.47.54.2", + "ip_address2": "144.138.148.220", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", "deployment_principal_layers1and3": "", "resource_owners": [ diff --git a/solution/DeploymentV2/terraform_layer1/outputs.tf b/solution/DeploymentV2/terraform_layer1/outputs.tf index 83f06f9e..11fe0e09 100644 --- a/solution/DeploymentV2/terraform_layer1/outputs.tf +++ b/solution/DeploymentV2/terraform_layer1/outputs.tf @@ -56,5 +56,5 @@ output "purview_sp_id" { } output "purview_sp_object_id" { - value = var.deploy_purview && var.is_vnet_isolated ? azuread_application.purview_ir[0].object_id : "0" + value = var.deploy_purview && var.is_vnet_isolated ? 
azuread_service_principal.purview_ir[0].object_id : "0" } diff --git a/solution/DeploymentV2/terraform_layer2/layer1.tf b/solution/DeploymentV2/terraform_layer2/layer1.tf index 49ce5b16..bb563b93 100644 --- a/solution/DeploymentV2/terraform_layer2/layer1.tf +++ b/solution/DeploymentV2/terraform_layer2/layer1.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer1.tfstate" - resource_group_name = "adf1" - storage_account_name = "adf1state" + resource_group_name = "gfuat" + storage_account_name = "gfuatstate" } } diff --git a/solution/DeploymentV2/terraform_layer2/outputs.tf b/solution/DeploymentV2/terraform_layer2/outputs.tf index 5115e679..0064ebe0 100644 --- a/solution/DeploymentV2/terraform_layer2/outputs.tf +++ b/solution/DeploymentV2/terraform_layer2/outputs.tf @@ -279,4 +279,6 @@ output "naming_unique_seed" { output "naming_unique_suffix" { value = data.terraform_remote_state.layer1.outputs.naming_unique_suffix -} \ No newline at end of file +} + + diff --git a/solution/DeploymentV2/terraform_layer2/purview.tf b/solution/DeploymentV2/terraform_layer2/purview.tf index 8fc48d46..eec5913c 100644 --- a/solution/DeploymentV2/terraform_layer2/purview.tf +++ b/solution/DeploymentV2/terraform_layer2/purview.tf @@ -19,83 +19,3 @@ resource "azurerm_purview_account" "purview" { } -resource "azurerm_private_endpoint" "purview_account_private_endpoint_with_dns" { - count = var.is_vnet_isolated && var.deploy_purview ? 
1 : 0 - name = local.purview_account_plink - location = var.resource_location - resource_group_name = var.resource_group_name - subnet_id = local.plink_subnet_id - - private_service_connection { - name = "${local.purview_account_plink}-conn" - private_connection_resource_id = azurerm_purview_account.purview[0].id - is_manual_connection = false - subresource_names = ["account"] - } - - private_dns_zone_group { - name = "privatednszonegroup" - private_dns_zone_ids = [local.private_dns_zone_purview_id] - } - - depends_on = [ - azurerm_purview_account.purview[0] - ] - - tags = local.tags - lifecycle { - ignore_changes = [ - tags - ] - } -} - -resource "azurerm_private_endpoint" "purview_portal_private_endpoint_with_dns" { - count = var.is_vnet_isolated && var.deploy_purview ? 1 : 0 - name = local.purview_portal_plink - location = var.resource_location - resource_group_name = var.resource_group_name - subnet_id = local.plink_subnet_id - - private_service_connection { - name = "${local.purview_portal_plink}-conn" - private_connection_resource_id = azurerm_purview_account.purview[0].id - is_manual_connection = false - subresource_names = ["portal"] - } - - private_dns_zone_group { - name = "privatednszonegroup" - private_dns_zone_ids = [local.private_dns_zone_purview_studio_id] - } - - depends_on = [ - azurerm_purview_account.purview[0] - ] - - tags = local.tags - lifecycle { - ignore_changes = [ - tags - ] - } -} - -# Azure private endpoints -module "purview_ingestion_private_endpoints" { - source = "./modules/purview_ingestion_private_endpoints" - count = var.is_vnet_isolated && var.deploy_purview ? 
1 : 0 - resource_group_name = var.resource_group_name - purview_account_name = azurerm_purview_account.purview[0].name - resource_location = var.resource_location - queue_privatelink_name = "${local.purview_name}-queue-plink" - storage_privatelink_name = "${local.purview_name}-storage-plink" - eventhub_privatelink_name = "${local.purview_name}-event-plink" - blob_private_dns_id = local.private_dns_zone_blob_id - queue_private_dns_id = local.private_dns_zone_queue_id - servicebus_private_dns_id = local.private_dns_zone_servicebus_id - subnet_id = local.plink_subnet_id - managed_resource_group_name = local.purview_resource_group_name - name_suffix = random_id.rg_deployment_unique.id - subscription_id = var.subscription_id -} diff --git a/solution/DeploymentV2/terraform_layer3/app_service.tf.bak b/solution/DeploymentV2/terraform_layer3/legacy/app_service.tf.bak similarity index 100% rename from solution/DeploymentV2/terraform_layer3/app_service.tf.bak rename to solution/DeploymentV2/terraform_layer3/legacy/app_service.tf.bak diff --git a/solution/DeploymentV2/terraform_layer3/database.tf.bak b/solution/DeploymentV2/terraform_layer3/legacy/database.tf.bak similarity index 100% rename from solution/DeploymentV2/terraform_layer3/database.tf.bak rename to solution/DeploymentV2/terraform_layer3/legacy/database.tf.bak diff --git a/solution/DeploymentV2/terraform_layer3/redo_sql_users.ps1 b/solution/DeploymentV2/terraform_layer3/legacy/redo_sql_users.ps1 similarity index 100% rename from solution/DeploymentV2/terraform_layer3/redo_sql_users.ps1 rename to solution/DeploymentV2/terraform_layer3/legacy/redo_sql_users.ps1 diff --git a/solution/DeploymentV2/terraform_layer3/purview.tf b/solution/DeploymentV2/terraform_layer3/purview.tf index ef29edd1..317d31b1 100644 --- a/solution/DeploymentV2/terraform_layer3/purview.tf +++ b/solution/DeploymentV2/terraform_layer3/purview.tf @@ -1,4 +1,85 @@ resource "azuread_application_password" "purview_ir" { count = var.deploy_purview && 
var.is_vnet_isolated ? 1 : 0 application_object_id = data.terraform_remote_state.layer2.outputs.purview_sp_object_id -} \ No newline at end of file +} + +resource "azurerm_private_endpoint" "purview_account_private_endpoint_with_dns" { + count = var.is_vnet_isolated && var.deploy_purview ? 1 : 0 + name = local.purview_account_plink + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.purview_account_plink}-conn" + private_connection_resource_id = azurerm_purview_account.purview[0].id + is_manual_connection = false + subresource_names = ["account"] + } + + private_dns_zone_group { + name = "privatednszonegroup" + private_dns_zone_ids = [local.private_dns_zone_purview_id] + } + + depends_on = [ + azurerm_purview_account.purview[0] + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_private_endpoint" "purview_portal_private_endpoint_with_dns" { + count = var.is_vnet_isolated && var.deploy_purview ? 1 : 0 + name = local.purview_portal_plink + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.purview_portal_plink}-conn" + private_connection_resource_id = azurerm_purview_account.purview[0].id + is_manual_connection = false + subresource_names = ["portal"] + } + + private_dns_zone_group { + name = "privatednszonegroup" + private_dns_zone_ids = [local.private_dns_zone_purview_studio_id] + } + + depends_on = [ + azurerm_purview_account.purview[0] + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# Azure private endpoints +module "purview_ingestion_private_endpoints" { + source = "./modules/purview_ingestion_private_endpoints" + count = var.is_vnet_isolated && var.deploy_purview ? 
1 : 0 + resource_group_name = var.resource_group_name + purview_account_name = azurerm_purview_account.purview[0].name + resource_location = var.resource_location + queue_privatelink_name = "${local.purview_name}-queue-plink" + storage_privatelink_name = "${local.purview_name}-storage-plink" + eventhub_privatelink_name = "${local.purview_name}-event-plink" + blob_private_dns_id = local.private_dns_zone_blob_id + queue_private_dns_id = local.private_dns_zone_queue_id + servicebus_private_dns_id = local.private_dns_zone_servicebus_id + subnet_id = local.plink_subnet_id + managed_resource_group_name = local.purview_resource_group_name + name_suffix = random_id.rg_deployment_unique.id + subscription_id = var.subscription_id +} diff --git a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/arm/privatelinks.json b/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/arm/privatelinks.json new file mode 100644 index 00000000..5ffeaed2 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/arm/privatelinks.json @@ -0,0 +1,355 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "subscriptionId" : { + "type" : "string" + }, + "purviewAccountName": { + "type": "String" + }, + "location": { + "type": "String" + }, + "queuePrivateLinkName": { + "type": "String" + }, + "storagePrivateLinkName": { + "type": "String" + }, + "eventHubPrivateLinkName": { + "type": "String" + }, + "resourceGroupName" : { + "type" : "string" + }, + "managedResourceGroupName": { + "type": "String" + }, + "subnetId": { + "type": "String" + }, + "queueDnsId": { + "type": "String" + }, + "storageDnsId": { + "type": "String" + }, + "serviceBusDnsId": { + "type": "String" + } + }, + "variables": {}, + "resources": [ + { + "type": "Microsoft.Purview/accounts", + "apiVersion": "2021-07-01", + "name": 
"[parameters('purviewAccountName')]", + "location": "[parameters('location')]", + "dependsOn": [], + "tags": {}, + "sku": { + "name": "Standard", + "capacity": 1 + }, + "identity": { + "type": "SystemAssigned" + }, + "properties": { + "managedResourceGroupName": "[parameters('managedResourceGroupName')]" + } + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "blob_1df2c831-46e3-41fd-831f-9c8ede5a9040", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "parameters": {}, + "template": { + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": {}, + "variables": {}, + "resources": [ + { + "apiVersion": "2020-03-01", + "name": "purview-blob", + "type": "Microsoft.Network/privateEndpoints", + "location": "[parameters('location')]", + "properties": { + "privateLinkServiceConnections": [ + { + "name": "purview-blob", + "properties": { + "privateLinkServiceId": "[reference(parameters('purviewAccountName')).managedResources.storageAccount]", + "groupIds": [ + "blob" + ] + } + } + ], + "subnet": { + "id": "[parameters('subnetId')]" + } + }, + "tags": {} + } + ], + "outputs": {} + } + }, + "resources": [], + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "queue_1df2c831-46e3-41fd-831f-9c8ede5a9043", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "parameters": {}, + "template": { + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": {}, + "variables": {}, + "resources": [ + { + "apiVersion": "2020-03-01", + "name": "purview-queue", + "type": "Microsoft.Network/privateEndpoints", + "location": "[parameters('location')]", + "properties": { + "privateLinkServiceConnections": [ + { + 
"name": "purview-queue", + "properties": { + "privateLinkServiceId": "[reference(parameters('purviewAccountName')).managedResources.storageAccount]", + "groupIds": [ + "queue" + ] + } + } + ], + "subnet": { + "id": "[parameters('subnetId')]" + } + }, + "tags": {} + } + ], + "outputs": {} + } + }, + "resources": [], + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "namespace_1df2c831-46e3-41fd-831f-9c8ede5a9046", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "parameters": {}, + "template": { + "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": {}, + "variables": {}, + "resources": [ + { + "apiVersion": "2020-03-01", + "name": "purview-namespace", + "type": "Microsoft.Network/privateEndpoints", + "location": "[parameters('location')]", + "properties": { + "privateLinkServiceConnections": [ + { + "name": "purview-namespace", + "properties": { + "privateLinkServiceId": "[reference(parameters('purviewAccountName')).managedResources.eventHubNamespace]", + "groupIds": [ + "namespace" + ] + } + } + ], + "subnet": { + "id": "[parameters('subnetId')]" + } + }, + "tags": {} + } + ], + "outputs": {} + } + }, + "resources": [], + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "PrivateDns-blob-1df2c83146e341fd831f9c8ede5a9041", + "dependsOn": [ + "blob_1df2c831-46e3-41fd-831f-9c8ede5a9040" + ], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "apiVersion": "2017-05-10", + "name": 
"DnsZoneGroup-1df2c83146e341fd831f9c8ede5a9041", + "type": "Microsoft.Resources/deployments", + "resourceGroup": "[parameters('resourceGroupName')]", + "subscriptionId": "[parameters('subscriptionId')]", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "type": "Microsoft.Network/privateEndpoints/privateDnsZoneGroups", + "apiVersion": "2020-03-01", + "name": "[concat('purview-blob', '/', 'default')]", + "location": "[parameters('location')]", + "properties": { + "privateDnsZoneConfigs": [ + { + "name": "privatelink-blob-core-windows-net", + "properties": { + "privateDnsZoneId": "[parameters('storageDnsId')]" + } + } + ] + } + } + ] + } + } + } + ] + } + }, + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "PrivateDns-queue-1df2c83146e341fd831f9c8ede5a9044", + "dependsOn": [ + "queue_1df2c831-46e3-41fd-831f-9c8ede5a9043" + ], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "apiVersion": "2017-05-10", + "name": "DnsZoneGroup-1df2c83146e341fd831f9c8ede5a9044", + "type": "Microsoft.Resources/deployments", + "resourceGroup": "[parameters('resourceGroupName')]", + "subscriptionId": "[parameters('subscriptionId')]", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "type": "Microsoft.Network/privateEndpoints/privateDnsZoneGroups", + "apiVersion": "2020-03-01", + "name": "[concat('purview-queue', '/', 'default')]", 
+ "location": "[parameters('location')]", + "properties": { + "privateDnsZoneConfigs": [ + { + "name": "privatelink-queue-core-windows-net", + "properties": { + "privateDnsZoneId": "[parameters('queueDnsId')]" + } + } + ] + } + } + ] + } + } + } + ] + } + }, + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "name": "PrivateDns-servicebus-1df2c83146e341fd831f9c8ede5a9047", + "dependsOn": [ + "namespace_1df2c831-46e3-41fd-831f-9c8ede5a9046" + ], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "apiVersion": "2017-05-10", + "name": "DnsZoneGroup-1df2c83146e341fd831f9c8ede5a9047", + "type": "Microsoft.Resources/deployments", + "resourceGroup": "[parameters('resourceGroupName')]", + "subscriptionId": "[parameters('subscriptionId')]", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "template": { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "resources": [ + { + "type": "Microsoft.Network/privateEndpoints/privateDnsZoneGroups", + "apiVersion": "2020-03-01", + "name": "[concat('purview-namespace', '/', 'default')]", + "location": "[parameters('location')]", + "properties": { + "privateDnsZoneConfigs": [ + { + "name": "privatelink-servicebus-windows-net", + "properties": { + "privateDnsZoneId": "[parameters('serviceBusDnsId')]" + } + } + ] + } + } + ] + } + } + } + ] + } + }, + "subscriptionId": "[parameters('subscriptionId')]", + "resourceGroup": "[parameters('resourceGroupName')]" + } + ], + "outputs": {} +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf 
b/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf new file mode 100644 index 00000000..b333a712 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf @@ -0,0 +1,44 @@ +resource "azurerm_resource_group_template_deployment" "ingestion_private_endpoints" { + name = "purview_ingestion_private_endpoints_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "purviewAccountName" = { + value = var.purview_account_name + }, + "subscriptionId" = { + value = var.subscription_id + }, + "location" = { + value = var.resource_location + }, + "queuePrivateLinkName" = { + value = var.queue_privatelink_name + }, + "storagePrivateLinkName" = { + value = var.storage_privatelink_name + }, + "eventHubPrivateLinkName" = { + value = var.eventhub_privatelink_name + }, + "subnetId" = { + value = var.subnet_id + }, + "managedResourceGroupName" = { + value = var.managed_resource_group_name + }, + "resourceGroupName" = { + value = var.resource_group_name + }, + "queueDnsId" = { + value = var.queue_private_dns_id + }, + "storageDnsId" = { + value = var.blob_private_dns_id + }, + "serviceBusDnsId" = { + value = var.servicebus_private_dns_id + } + }) + template_content = file("${path.module}/arm/privatelinks.json") +} diff --git a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/outputs.tf b/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/outputs.tf new file mode 100644 index 00000000..e69de29b diff --git a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/vars.tf b/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/vars.tf new file mode 100644 index 00000000..59c213f7 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/vars.tf @@ -0,0 +1,62 @@ +variable "resource_group_name" { + 
description = "The name of the resource group to deploy into" + type = string +} + +variable "purview_account_name" { + description = "The name of the data factory" + type = string +} +variable "resource_location" { + description = "The uri of the shared keyvault" + type = string +} + +variable "subscription_id" { + description = "The Id of the azure sub" + type = string +} + +variable "queue_privatelink_name" { + description = "The name of the queue private link" + type = string +} + +variable "storage_privatelink_name" { + description = "The name of the storage private link" + type = string +} + +variable "eventhub_privatelink_name" { + description = "The name of the eventhub private link" + type = string +} + +variable "queue_private_dns_id" { + description = "The id of the queue private DNS" + type = string +} + +variable "blob_private_dns_id" { + description = "The id of the queue private DNS" + type = string +} + +variable "servicebus_private_dns_id" { + description = "The id of the queue private DNS" + type = string +} + +variable "subnet_id" { + description = "The id of the subnet to attach the purview ingestion resources" + type = string +} +variable "managed_resource_group_name" { + description = "The name of the purview managed resource group" + type = string +} +variable "name_suffix" { + description = "Used to give resource group deployments unique names for an environment" + type = string +} + diff --git a/solution/DeploymentV2/terraform_layer3/vars/uat/terragrunt.hcl b/solution/DeploymentV2/terraform_layer3/vars/uat/terragrunt.hcl index 9470faa3..7c54a09c 100644 --- a/solution/DeploymentV2/terraform_layer3/vars/uat/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer3/vars/uat/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../bin/environments/uat/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/staging/common_vars_for_hcl.json")) } generate "layer2.tf" { From 
eb3a9abe25a6567700aa347d2b3fc3962ab417ac Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 6 Aug 2022 12:48:21 +0800 Subject: [PATCH 092/151] Moving Purview Private Endpoints to Layer3 --- .../vars/common_vars_template.jsonnet | 2 ++ solution/DeploymentV2/terraform/.gitignore | 2 ++ solution/DeploymentV2/terraform/outputs.tf | 21 +++++++++++ .../DeploymentV2/terraform_layer1/outputs.tf | 5 +++ .../DeploymentV2/terraform_layer2/outputs.tf | 36 +++++++++++++++++++ .../DeploymentV2/terraform_layer3/locals.tf | 6 ++++ .../DeploymentV2/terraform_layer3/purview.tf | 14 ++++---- .../terraform_layer3/vars/uat/terragrunt.hcl | 2 +- 8 files changed, 79 insertions(+), 9 deletions(-) diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index aaab0094..a0bbb34b 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -651,6 +651,8 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + diff --git a/solution/DeploymentV2/terraform/.gitignore b/solution/DeploymentV2/terraform/.gitignore index cc143939..a9da2aca 100644 --- a/solution/DeploymentV2/terraform/.gitignore +++ b/solution/DeploymentV2/terraform/.gitignore @@ -37,3 +37,5 @@ terraform.rc backend.tf +layer1.tf +layer2.tf diff --git a/solution/DeploymentV2/terraform/outputs.tf b/solution/DeploymentV2/terraform/outputs.tf index 66cc64fc..b664a4b6 100644 --- a/solution/DeploymentV2/terraform/outputs.tf +++ b/solution/DeploymentV2/terraform/outputs.tf @@ -64,6 +64,9 @@ output "purview_name" { output "purview_sp_name" { value = local.purview_ir_app_reg_name } +output "azurerm_purview_account_purview_id" { + value = azurerm_purview_account.purview[0].id +} output "is_vnet_isolated" { value = var.is_vnet_isolated } @@ -245,4 +248,22 @@ output "naming_unique_seed" { output "naming_unique_suffix" { value = data.terraform_remote_state.layer1.outputs.naming_unique_suffix +} + + +/*DNS Zone*/ +output "private_dns_zone_servicebus_id" { + value = local.private_dns_zone_servicebus_id +} + +output "private_dns_zone_queue_id" { + value = local.naming_unique_suffix +} + +output "private_dns_zone_blob_id" { + value = local.naming_unique_suffix +} + +output "plink_subnet_id" { + value = local.plink_subnet_id } \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer1/outputs.tf b/solution/DeploymentV2/terraform_layer1/outputs.tf index 11fe0e09..a74de29d 100644 --- a/solution/DeploymentV2/terraform_layer1/outputs.tf +++ b/solution/DeploymentV2/terraform_layer1/outputs.tf @@ -55,6 +55,11 @@ output "purview_sp_id" { value = var.deploy_purview && var.is_vnet_isolated ? azuread_application.purview_ir[0].application_id : "0" } +output "azuread_application_purview_ir_object_id" { + value = var.deploy_purview && var.is_vnet_isolated ? azuread_application.purview_ir[0].object_id : "0" +} + + output "purview_sp_object_id" { value = var.deploy_purview && var.is_vnet_isolated ? 
azuread_service_principal.purview_ir[0].object_id : "0" } diff --git a/solution/DeploymentV2/terraform_layer2/outputs.tf b/solution/DeploymentV2/terraform_layer2/outputs.tf index 0064ebe0..20c1113c 100644 --- a/solution/DeploymentV2/terraform_layer2/outputs.tf +++ b/solution/DeploymentV2/terraform_layer2/outputs.tf @@ -69,6 +69,9 @@ output "purview_name" { output "purview_sp_name" { value = local.purview_ir_app_reg_name } +output "azurerm_purview_account_purview_id" { + value = azurerm_purview_account.purview[0].id +} output "is_vnet_isolated" { value = var.is_vnet_isolated } @@ -272,6 +275,12 @@ output "purview_account_principal_id" { value = var.deploy_purview && var.is_vnet_isolated ? azurerm_purview_account.purview[0].identity[0].principal_id : "0" } + +output "azuread_application_purview_ir_object_id" { + value = data.terraform_remote_state.layer1.outputs.azuread_application_purview_ir_object_id +} + + /*Variables for Naming Module*/ output "naming_unique_seed" { value = data.terraform_remote_state.layer1.outputs.naming_unique_seed @@ -281,4 +290,31 @@ output "naming_unique_suffix" { value = data.terraform_remote_state.layer1.outputs.naming_unique_suffix } +/*DNS Zone*/ +output "private_dns_zone_servicebus_id" { + value = local.private_dns_zone_servicebus_id +} +output "private_dns_zone_queue_id" { + value = local.private_dns_zone_queue_id +} + +output "private_dns_zone_blob_id" { + value = local.private_dns_zone_blob_id +} + +output "private_dns_zone_purview_id" { + value = local.private_dns_zone_purview_id +} + +output "private_dns_zone_purview_studio_id" { + value = local.private_dns_zone_purview_studio_id +} + +output "azurerm_purview_account_purview_name" { + value = azurerm_purview_account.purview[0].name +} + +output "plink_subnet_id" { + value = local.plink_subnet_id +} diff --git a/solution/DeploymentV2/terraform_layer3/locals.tf b/solution/DeploymentV2/terraform_layer3/locals.tf index 01e9b844..80353201 100644 --- 
a/solution/DeploymentV2/terraform_layer3/locals.tf +++ b/solution/DeploymentV2/terraform_layer3/locals.tf @@ -10,6 +10,12 @@ locals { purview_portal_plink = (data.terraform_remote_state.layer2.outputs.purview_name != "" ? data.terraform_remote_state.layer2.outputs.purview_name : "${var.prefix}-${var.environment_tag}-purp-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") purview_resource_group_name = "managed-${module.naming.resource_group.name_unique}-purview" purview_ir_app_reg_name = data.terraform_remote_state.layer2.outputs.purview_sp_name + private_dns_zone_servicebus_id = data.terraform_remote_state.layer2.outputs.private_dns_zone_servicebus_id + private_dns_zone_queue_id = data.terraform_remote_state.layer2.outputs.private_dns_zone_queue_id + private_dns_zone_blob_id = data.terraform_remote_state.layer2.outputs.private_dns_zone_blob_id + private_dns_zone_purview_id = data.terraform_remote_state.layer2.outputs.private_dns_zone_purview_id + private_dns_zone_purview_studio_id = data.terraform_remote_state.layer2.outputs.private_dns_zone_purview_studio_id + plink_subnet_id = data.terraform_remote_state.layer2.outputs.plink_subnet_id tags = { Environment = var.environment_tag diff --git a/solution/DeploymentV2/terraform_layer3/purview.tf b/solution/DeploymentV2/terraform_layer3/purview.tf index 317d31b1..26e450b6 100644 --- a/solution/DeploymentV2/terraform_layer3/purview.tf +++ b/solution/DeploymentV2/terraform_layer3/purview.tf @@ -1,6 +1,6 @@ resource "azuread_application_password" "purview_ir" { count = var.deploy_purview && var.is_vnet_isolated ? 
1 : 0 - application_object_id = data.terraform_remote_state.layer2.outputs.purview_sp_object_id + application_object_id = data.terraform_remote_state.layer2.outputs.azuread_application_purview_ir_object_id } resource "azurerm_private_endpoint" "purview_account_private_endpoint_with_dns" { @@ -12,7 +12,7 @@ resource "azurerm_private_endpoint" "purview_account_private_endpoint_with_dns" private_service_connection { name = "${local.purview_account_plink}-conn" - private_connection_resource_id = azurerm_purview_account.purview[0].id + private_connection_resource_id = data.terraform_remote_state.layer2.outputs.azurerm_purview_account_purview_id is_manual_connection = false subresource_names = ["account"] } @@ -23,7 +23,6 @@ resource "azurerm_private_endpoint" "purview_account_private_endpoint_with_dns" } depends_on = [ - azurerm_purview_account.purview[0] ] tags = local.tags @@ -43,7 +42,7 @@ resource "azurerm_private_endpoint" "purview_portal_private_endpoint_with_dns" { private_service_connection { name = "${local.purview_portal_plink}-conn" - private_connection_resource_id = azurerm_purview_account.purview[0].id + private_connection_resource_id = data.terraform_remote_state.layer2.outputs.azurerm_purview_account_purview_id is_manual_connection = false subresource_names = ["portal"] } @@ -53,8 +52,7 @@ resource "azurerm_private_endpoint" "purview_portal_private_endpoint_with_dns" { private_dns_zone_ids = [local.private_dns_zone_purview_studio_id] } - depends_on = [ - azurerm_purview_account.purview[0] + depends_on = [ ] tags = local.tags @@ -67,10 +65,10 @@ resource "azurerm_private_endpoint" "purview_portal_private_endpoint_with_dns" { # Azure private endpoints module "purview_ingestion_private_endpoints" { - source = "./modules/purview_ingestion_private_endpoints" + source = "./purview_ingestion_private_endpoints" count = var.is_vnet_isolated && var.deploy_purview ? 
1 : 0 resource_group_name = var.resource_group_name - purview_account_name = azurerm_purview_account.purview[0].name + purview_account_name = data.terraform_remote_state.layer2.outputs.azurerm_purview_account_purview_name resource_location = var.resource_location queue_privatelink_name = "${local.purview_name}-queue-plink" storage_privatelink_name = "${local.purview_name}-storage-plink" diff --git a/solution/DeploymentV2/terraform_layer3/vars/uat/terragrunt.hcl b/solution/DeploymentV2/terraform_layer3/vars/uat/terragrunt.hcl index 7c54a09c..9470faa3 100644 --- a/solution/DeploymentV2/terraform_layer3/vars/uat/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer3/vars/uat/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../bin/environments/staging/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/uat/common_vars_for_hcl.json")) } generate "layer2.tf" { From b0426ed845b57fd7313fa5189a1ecafc077927d3 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 6 Aug 2022 12:48:47 +0800 Subject: [PATCH 093/151] Update CICD --- .github/workflows/02.continuous-delivery-uat.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/02.continuous-delivery-uat.yml b/.github/workflows/02.continuous-delivery-uat.yml index 08482b0a..7fbfbe66 100644 --- a/.github/workflows/02.continuous-delivery-uat.yml +++ b/.github/workflows/02.continuous-delivery-uat.yml @@ -3,7 +3,7 @@ name: Continuous Delivery - Uat on: workflow_dispatch: push: - branches: main #feature-1.0.4 + branches: feature-1.0.4 jobs: deploy-to-env-one: From 203c4dd6358c2db84ab2b207e46cb664b547b93d Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 6 Aug 2022 14:53:44 +0800 Subject: [PATCH 094/151] Added Private Links Approval to Layer 2 --- solution/DeploymentV2/Deploy_1_Infra0.ps1 | 65 ---------- .../DeploymentV2/Deploy_2_Infra0_Outputs.ps1 | 45 ------- .../DeploymentV2/Deploy_4_PrivateLinks.ps1 | 52 -------- 
solution/DeploymentV2/Deploy_8_SQLLogins.ps1 | 122 ------------------ .../DeploymentV2/PrivateEndPointApprover.json | 14 -- .../vars/common_vars_template.jsonnet | 2 + .../{ => pwshmodules}/Deploy_3_Infra1.ps1 | 0 .../pwshmodules/Deploy_4_PrivateLinks.ps1 | 58 +++++++++ .../terraform_layer2/02-publish.ps1 | 3 + .../terraform_layer3/{ => legacy}/synapse.tf | 0 .../DeploymentV2/terraform_layer3/purview.tf | 4 +- .../main.tf | 6 + .../DeploymentV2/terraform_layer3/readme.md | 2 +- 13 files changed, 71 insertions(+), 302 deletions(-) delete mode 100644 solution/DeploymentV2/Deploy_1_Infra0.ps1 delete mode 100644 solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 delete mode 100644 solution/DeploymentV2/Deploy_4_PrivateLinks.ps1 delete mode 100644 solution/DeploymentV2/Deploy_8_SQLLogins.ps1 delete mode 100644 solution/DeploymentV2/PrivateEndPointApprover.json rename solution/DeploymentV2/{ => pwshmodules}/Deploy_3_Infra1.ps1 (100%) create mode 100644 solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.ps1 rename solution/DeploymentV2/terraform_layer3/{ => legacy}/synapse.tf (100%) diff --git a/solution/DeploymentV2/Deploy_1_Infra0.ps1 b/solution/DeploymentV2/Deploy_1_Infra0.ps1 deleted file mode 100644 index 520bb6a4..00000000 --- a/solution/DeploymentV2/Deploy_1_Infra0.ps1 +++ /dev/null @@ -1,65 +0,0 @@ -param ( - [Parameter(Mandatory=$false)] - [System.Boolean]$skipTerraformDeployment=$false, - [Parameter(Mandatory=$false)] - [System.Boolean]$RunTerraformLayer1=$false, - [Parameter(Mandatory=$false)] - [System.Boolean]$RunTerraformLayer2=$false, - [Parameter(Mandatory=$false)] - [System.Boolean]$RunTerraformLayer3=$false -) -#Write-Host $RunTerraformLayer1 -#Write-Host $RunTerraformLayer2 -#Write-Host $RunTerraformLayer3 -#---------------------------------------------------------------------------------------------------------------- -# Deploy Infrastructure 
-#---------------------------------------------------------------------------------------------------------------- -# DEBUGGING HINTS: -# - If you don't have an access policy for the KeyVault to set the secret values, run this -# az keyvault set-policy -n {keyVaultName} --secret-permissions all --object-id <> -# - If the firewall is blocking you, add your IP as firewall rule / exemption to the appropriate resource -# - If you havn't run prepare but want to run this script on its own, set the TF_VAR_jumphost_password and TF_VAR_domain env vars -#------------------------------------------------------------------------------------------------------------ - -Set-Location "./terraform_layer1" - -terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure - -if ($skipTerraformDeployment -or $RunTerraformLayer1 -ne $true) { - Write-Host "Skipping Terraform Deployment - Layer 1" -} -else { - Write-Host "Starting Terraform Deployment- Layer 1" - terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -} - -Set-Location $deploymentFolderPath - -Set-Location "./terraform_layer2" - -terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure - -if ($skipTerraformDeployment -or $RunTerraformLayer2 -ne $true) { - Write-Host "Skipping Terraform Deployment- Layer 2" -} -else { - Write-Host "Starting Terraform Deployment- Layer 2" - terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -} - -Set-Location $deploymentFolderPath - -Set-Location "./terraform_layer3" - -terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure - -if ($skipTerraformDeployment -or $RunTerraformLayer3 -ne $true) { - Write-Host "Skipping Terraform Deployment- Layer 3" -} -else { - Write-Host "Starting Terraform Deployment- Layer 3" - terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -} - -Set-Location 
$deploymentFolderPath - diff --git a/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 b/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 deleted file mode 100644 index b5028e5e..00000000 --- a/solution/DeploymentV2/Deploy_2_Infra0_Outputs.ps1 +++ /dev/null @@ -1,45 +0,0 @@ -#------------------------------------------------------------------------------------------------------------ -# Get all the outputs from terraform so we can use them in subsequent steps -#------------------------------------------------------------------------------------------------------------ -Set-Location "./terraform" -Write-Host "Reading Terraform Outputs" -Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform - -$outputs = terragrunt output -json --terragrunt-config ./vars/$env:environmentName/terragrunt.hcl | ConvertFrom-Json - -$subscription_id =$outputs.subscription_id.value -$resource_group_name =$outputs.resource_group_name.value -$webapp_name =$outputs.webapp_name.value -$functionapp_name=$outputs.functionapp_name.value -$purview_name=$outputs.purview_name.value -$sqlserver_name=$outputs.sqlserver_name.value -$blobstorage_name=$outputs.blobstorage_name.value -$adlsstorage_name=$outputs.adlsstorage_name.value -$datafactory_name=$outputs.datafactory_name.value -$keyvault_name=$outputs.keyvault_name.value -#sif database name -$sifdb_name = if([string]::IsNullOrEmpty($outputs.sifdb_name.value)){"SIFDM"} - -$stagingdb_name=$outputs.stagingdb_name.value -$sampledb_name=$outputs.sampledb_name.value -$metadatadb_name=$outputs.metadatadb_name.value -$loganalyticsworkspace_id=$outputs.loganalyticsworkspace_id.value -$purview_sp_name=$outputs.purview_sp_name.value -$synapse_workspace_name=if([string]::IsNullOrEmpty($outputs.synapse_workspace_name.value)) {"Dummy"} else {$outputs.synapse_workspace_name.value} -$synapse_sql_pool_name=if([string]::IsNullOrEmpty($outputs.synapse_sql_pool_name.value)) {"Dummy"} else 
{$outputs.synapse_sql_pool_name.value} -$synapse_spark_pool_name=if([string]::IsNullOrEmpty($outputs.synapse_spark_pool_name.value)) {"Dummy"} else {$outputs.synapse_spark_pool_name.value} -$skipCustomTerraform = if($tout.deploy_custom_terraform) {$false} else {$true} -$skipWebApp = if($tout.publish_web_app -and $tout.deploy_web_app) {$false} else {$true} -$skipFunctionApp = if($tout.publish_function_app -and $tout.deploy_function_app) {$false} else {$true} -$skipDatabase = if($tout.publish_metadata_database -and $tout.deploy_metadata_database) {$false} else {$true} -$skipSQLLogins = if($tout.publish_sql_logins -and $tout.deploy_sql_server) {$false} else {$true} -$skipSynapseLogins = if($tout.publish_sql_logins -and $tout.deploy_synapse) {$false} else {$true} -$skipSampleFiles = if($tout.publish_sample_files){$false} else {$true} -$skipSIF= if($tout.publish_sif_database){$false} else {$true} -$skipNetworking = if($tout.configure_networking){$false} else {$true} -$skipDataFactoryPipelines = if($tout.publish_datafactory_pipelines) {$false} else {$true} -$skipFunctionalTests = if($tout.publish_functional_tests) {$false} else {$true} -$skipConfigurePurview = if($tout.publish_configure_purview) {$false} else {$true} -$AddCurrentUserAsWebAppAdmin = if($tout.publish_web_app_addcurrentuserasadmin) {$true} else {$false} -Set-Location $deploymentFolderPath \ No newline at end of file diff --git a/solution/DeploymentV2/Deploy_4_PrivateLinks.ps1 b/solution/DeploymentV2/Deploy_4_PrivateLinks.ps1 deleted file mode 100644 index dc868587..00000000 --- a/solution/DeploymentV2/Deploy_4_PrivateLinks.ps1 +++ /dev/null @@ -1,52 +0,0 @@ - -if ($skipNetworking -or $tout.is_vnet_isolated -eq $false) { - Write-Host "Skipping Private Link Connnections" -} -else { - #------------------------------------------------------------------------------------------------------------ - # Approve the Private Link Connections that get generated from the Managed Private Links in ADF - 
#------------------------------------------------------------------------------------------------------------ - Write-Host "Approving Private Link Connections" - $links = az network private-endpoint-connection list -g $resource_group_name -n $keyvault_name --type 'Microsoft.KeyVault/vaults' | ConvertFrom-Json - foreach($link in $links){ - if($link.properties.privateLinkServiceConnectionState.status -eq "Pending"){ - $id_parts = $link.id.Split("/"); - Write-Host "- " + $id_parts[$id_parts.length-1] - $result = az network private-endpoint-connection approve -g $resource_group_name -n $id_parts[$id_parts.length-1] --resource-name $keyvault_name --type Microsoft.Keyvault/vaults --description "Approved by Deploy.ps1" - } - } - $links = az network private-endpoint-connection list -g $resource_group_name -n $sqlserver_name --type 'Microsoft.Sql/servers' | ConvertFrom-Json - foreach($link in $links){ - if($link.properties.privateLinkServiceConnectionState.status -eq "Pending"){ - $id_parts = $link.id.Split("/"); - Write-Host "- " + $id_parts[$id_parts.length-1] - $result = az network private-endpoint-connection approve -g $resource_group_name -n $id_parts[$id_parts.length-1] --resource-name $sqlserver_name --type Microsoft.Sql/servers --description "Approved by Deploy.ps1" - } - } - - $links = az network private-endpoint-connection list -g $resource_group_name -n $synapse_workspace_name --type 'Microsoft.Synapse/workspaces' | ConvertFrom-Json - foreach($link in $links){ - if($link.properties.privateLinkServiceConnectionState.status -eq "Pending"){ - $id_parts = $link.id.Split("/"); - Write-Host "- " + $id_parts[$id_parts.length-1] - $result = az network private-endpoint-connection approve -g $resource_group_name -n $id_parts[$id_parts.length-1] --resource-name $synapse_workspace_name --type Microsoft.Synapse/workspaces --description "Approved by Deploy.ps1" - } - } - - $links = az network private-endpoint-connection list -g $resource_group_name -n $blobstorage_name --type 
'Microsoft.Storage/storageAccounts' | ConvertFrom-Json - foreach($link in $links){ - if($link.properties.privateLinkServiceConnectionState.status -eq "Pending"){ - $id_parts = $link.id.Split("/"); - Write-Host "- " + $id_parts[$id_parts.length-1] - $result = az network private-endpoint-connection approve -g $resource_group_name -n $id_parts[$id_parts.length-1] --resource-name $blobstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" - } - } - $links = az network private-endpoint-connection list -g $resource_group_name -n $adlsstorage_name --type 'Microsoft.Storage/storageAccounts' | ConvertFrom-Json - foreach($link in $links){ - if($link.properties.privateLinkServiceConnectionState.status -eq "Pending"){ - $id_parts = $link.id.Split("/"); - Write-Host "- " + $id_parts[$id_parts.length-1] - $result = az network private-endpoint-connection approve -g $resource_group_name -n $id_parts[$id_parts.length-1] --resource-name $adlsstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" - } - } -} diff --git a/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 b/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 deleted file mode 100644 index 9cf70c84..00000000 --- a/solution/DeploymentV2/Deploy_8_SQLLogins.ps1 +++ /dev/null @@ -1,122 +0,0 @@ -param ( - [Parameter(Mandatory=$false)] - [bool]$PublishSQLLogins=$false -) -#---------------------------------------------------------------------------------------------------------------- -# Configure SQL Server Logins -#---------------------------------------------------------------------------------------------------------------- -if($PublishSQLLogins -eq $false) { - Write-Host "Skipping configuration of SQL Server Users" -} -else { - Write-Host "Configuring SQL Server Users" - $databases = @($stagingdb_name, $sampledb_name, $metadatadb_name) - - $aadUsers = @($datafactory_name) - - if(!$purview_sp_id -eq 0) - { - $aadUsers += $purview_name - $aadUsers += 
$purview_sp_name - } - - $sqladmins = ($env:TF_VAR_azure_sql_aad_administrators | ConvertFrom-Json -Depth 10) - $sqladmins2 = ($Sqladmins | Get-Member) | Where-Object {$_.MemberType -eq "NoteProperty"} | Select-Object -Property Name - foreach($user in $sqladmins2) - { - if($user.Name -ne "sql_aad_admin") - { - $aadUsers += $user.Name - } - } - - - $token=$(az account get-access-token --resource=https://database.windows.net --query accessToken --output tsv) - foreach($database in $databases) - { - - foreach($user in $aadUsers) - { - if (![string]::IsNullOrEmpty($user)) - { - $sqlcommand = " - - IF NOT EXISTS (SELECT * - FROM [sys].[database_principals] - WHERE [type] = N'E' AND [name] = N'$user') - BEGIN - CREATE USER [$user] FROM EXTERNAL PROVIDER; - END - ALTER ROLE db_datareader ADD MEMBER [$user]; - ALTER ROLE db_datawriter ADD MEMBER [$user]; - GRANT EXECUTE ON SCHEMA::[dbo] TO [$user]; - GO - - " - - write-host "Granting MSI Privileges on $database DB to $user" - Invoke-Sqlcmd -ServerInstance "$sqlserver_name.database.windows.net,1433" -Database $database -AccessToken $token -query $sqlcommand - } - } - } - - $ddlCommand = "ALTER ROLE db_ddladmin ADD MEMBER [$datafactory_name];" - foreach($database in $databases) - { - write-host "Granting DDL Role on $database DB to $datafactory_name" - Invoke-Sqlcmd -ServerInstance "$sqlserver_name.database.windows.net,1433" -Database $database -AccessToken $token -query $ddlCommand - } - -} - -#---------------------------------------------------------------------------------------------------------------- -# Configure Synapse Logins -#---------------------------------------------------------------------------------------------------------------- -if($PublishSQLLogins -eq $false) { - Write-Host "Skipping configuration of Synapse SQL Users" -} -else { - Write-Host "Configuring Synapse SQL Users" - - $myIp = $env:TF_VAR_ip_address - $myIp2 = $env:TF_VAR_ip_address2 - - #Add Ip to SQL Firewall - #$result = az synapse workspace 
update -n $synapse_workspace_name -g $resource_group_name --set publicNetworkAccess="Enabled" - $result = az synapse workspace firewall-rule create --resource-group $resource_group_name --workspace-name $synapse_workspace_name --name "DeploymentAgent" --start-ip-address $myIp --end-ip-address $myIp - $result = az synapse workspace firewall-rule create --resource-group $resource_group_name --workspace-name $synapse_workspace_name --name "DeploymentUser" --start-ip-address $myIp2 --end-ip-address $myIp2 - - if ($tout.is_vnet_isolated -eq $false) - { - $result = az synapse workspace firewall-rule create --resource-group $resource_group_name --workspace-name $synapse_workspace_name --name "AllowAllWindowsAzureIps" --start-ip-address "0.0.0.0" --end-ip-address "0.0.0.0" - } - - if([string]::IsNullOrEmpty($synapse_sql_pool_name) ) - { - write-host "Synapse pool is not deployed." - } - else - { - # Fix the MSI registrations on the other databases. I'd like a better way of doing this in the future - $SqlInstalled = Get-InstalledModule SqlServer - if($null -eq $SqlInstalled) - { - write-host "Installing SqlServer Module" - Install-Module -Name SqlServer -Scope CurrentUser -Force - } - - - - $token=$(az account get-access-token --resource=https://sql.azuresynapse.net --query accessToken --output tsv) - if ((![string]::IsNullOrEmpty($datafactory_name)) -and ($synapse_sql_pool_name -ne 'Dummy') -and (![string]::IsNullOrEmpty($synapse_sql_pool_name))) - { - # For a Spark user to read and write directly from Spark into or from a SQL pool, db_owner permission is required. 
- Invoke-Sqlcmd -ServerInstance "$synapse_workspace_name.sql.azuresynapse.net,1433" -Database $synapse_sql_pool_name -AccessToken $token -query "IF NOT EXISTS (SELECT name - FROM [sys].[database_principals] - WHERE [type] = 'E' AND name = N'$datafactory_name') BEGIN CREATE USER [$datafactory_name] FROM EXTERNAL PROVIDER END" - Invoke-Sqlcmd -ServerInstance "$synapse_workspace_name.sql.azuresynapse.net,1433" -Database $synapse_sql_pool_name -AccessToken $token -query "EXEC sp_addrolemember 'db_owner', '$datafactory_name'" - } - } - - -} \ No newline at end of file diff --git a/solution/DeploymentV2/PrivateEndPointApprover.json b/solution/DeploymentV2/PrivateEndPointApprover.json deleted file mode 100644 index da1ac44f..00000000 --- a/solution/DeploymentV2/PrivateEndPointApprover.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "Name": "Private Endpoint Approver", - "IsCustom": true, - "Description": "Approve Private Endpoints", - "Actions": [ - "PrivateEndpointConnectionsApproval/action" - ], - "NotActions": [], - "DataActions": [], - "NotDataActions": [], - "AssignableScopes": [ - "/" - ] -} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index a0bbb34b..d5d076d2 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -653,6 +653,8 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + diff --git a/solution/DeploymentV2/Deploy_3_Infra1.ps1 b/solution/DeploymentV2/pwshmodules/Deploy_3_Infra1.ps1 similarity index 100% rename from solution/DeploymentV2/Deploy_3_Infra1.ps1 rename to solution/DeploymentV2/pwshmodules/Deploy_3_Infra1.ps1 diff --git a/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.ps1 b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.ps1 new file mode 100644 index 00000000..54a26033 --- /dev/null +++ b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.ps1 @@ -0,0 +1,58 @@ + +function DeployPrivateLinks ( + [Parameter(Mandatory = $true)] + [pscustomobject]$tout = $false +) { + $skipNetworking = if($tout.configure_networking){$false} else {$true} + if ($skipNetworking -or $tout.is_vnet_isolated -eq $false) { + Write-Host "Skipping Private Link Connnections" + } + else { + #------------------------------------------------------------------------------------------------------------ + # Approve the Private Link Connections that get generated from the Managed Private Links in ADF + #------------------------------------------------------------------------------------------------------------ + Write-Host "Approving Private Link Connections" + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.keyvault_name --type 'Microsoft.KeyVault/vaults' | ConvertFrom-Json + foreach ($link in $links) { + if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { + $id_parts = $link.id.Split("/"); + Write-Host "- " + $id_parts[$id_parts.length - 1] + $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.keyvault_name --type Microsoft.Keyvault/vaults --description "Approved by Deploy.ps1" + } + } + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $sqlserver_name --type 'Microsoft.Sql/servers' | ConvertFrom-Json + foreach ($link in $links) { + if 
($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { + $id_parts = $link.id.Split("/"); + Write-Host "- " + $id_parts[$id_parts.length - 1] + $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.sqlserver_name --type Microsoft.Sql/servers --description "Approved by Deploy.ps1" + } + } + + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $synapse_workspace_name --type 'Microsoft.Synapse/workspaces' | ConvertFrom-Json + foreach ($link in $links) { + if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { + $id_parts = $link.id.Split("/"); + Write-Host "- " + $id_parts[$id_parts.length - 1] + $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.synapse_workspace_name --type Microsoft.Synapse/workspaces --description "Approved by Deploy.ps1" + } + } + + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.blobstorage_name --type 'Microsoft.Storage/storageAccounts' | ConvertFrom-Json + foreach ($link in $links) { + if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { + $id_parts = $link.id.Split("/"); + Write-Host "- " + $id_parts[$id_parts.length - 1] + $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $blobstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" + } + } + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.adlsstorage_name --type 'Microsoft.Storage/storageAccounts' | ConvertFrom-Json + foreach ($link in $links) { + if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { + $id_parts = $link.id.Split("/"); + Write-Host "- " + $id_parts[$id_parts.length 
- 1] + $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.adlsstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" + } + } + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/02-publish.ps1 b/solution/DeploymentV2/terraform_layer2/02-publish.ps1 index f0f9d91d..0064c097 100644 --- a/solution/DeploymentV2/terraform_layer2/02-publish.ps1 +++ b/solution/DeploymentV2/terraform_layer2/02-publish.ps1 @@ -48,6 +48,9 @@ $tout = GatherOutputsFromTerraform -TerraformFolderPath $PathToReturnTo #------------------------------------------------------------------------------------------------------------ # Publish #------------------------------------------------------------------------------------------------------------ +import-Module ./../pwshmodules/Deploy_4_PrivateLinks.psm1 -force +DeployPrivateLinks -tout $tout + import-Module ./../pwshmodules/Deploy_5_WebApp.psm1 -force DeployWebApp -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo diff --git a/solution/DeploymentV2/terraform_layer3/synapse.tf b/solution/DeploymentV2/terraform_layer3/legacy/synapse.tf similarity index 100% rename from solution/DeploymentV2/terraform_layer3/synapse.tf rename to solution/DeploymentV2/terraform_layer3/legacy/synapse.tf diff --git a/solution/DeploymentV2/terraform_layer3/purview.tf b/solution/DeploymentV2/terraform_layer3/purview.tf index 26e450b6..177460e0 100644 --- a/solution/DeploymentV2/terraform_layer3/purview.tf +++ b/solution/DeploymentV2/terraform_layer3/purview.tf @@ -27,9 +27,7 @@ resource "azurerm_private_endpoint" "purview_account_private_endpoint_with_dns" tags = local.tags lifecycle { - ignore_changes = [ - tags - ] + ignore_changes = all } } diff --git a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf 
b/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf index b333a712..55d1752d 100644 --- a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf +++ b/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf @@ -40,5 +40,11 @@ resource "azurerm_resource_group_template_deployment" "ingestion_private_endpoin value = var.servicebus_private_dns_id } }) + lifecycle { + ignore_changes = [ + tags, + template_content + ] + } template_content = file("${path.module}/arm/privatelinks.json") } diff --git a/solution/DeploymentV2/terraform_layer3/readme.md b/solution/DeploymentV2/terraform_layer3/readme.md index 6b4ab57a..cc5f4047 100644 --- a/solution/DeploymentV2/terraform_layer3/readme.md +++ b/solution/DeploymentV2/terraform_layer3/readme.md @@ -1,7 +1,7 @@ This is layer three of the ADS Go Fast Terraform deployment # Notes regarding permissions need to deploy this layer -This layer needs to read & write to the Azure Function Enterprise Application and Service Principal. Therefore the deployment account needs Application.ReadWrite.OwnedBy as well as Directory.Read.All in the Azure Graph API. It also needs to be an owner of the Azure Function Enterprise Application and Service Principal. +This layer needs to read & write to the Azure Function Enterprise Application and Service Principal. Therefore the deployment account needs Application.ReadWrite.OwnedBy as well as Directory.Read.All in the Azure Graph API. It also needs to be an owner of the Azure Function Enterprise Application and Service Principal. If the deployment is Vnet Isolated it will also require the Azure network contributor role. In order to write the function app secret into key vault it also requires secret read/write on the key vault used by the deployment. 
From 3b8f215c894c261e80022ce81620fb7925bd34e5 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 6 Aug 2022 15:55:28 +0800 Subject: [PATCH 095/151] Adding in feature template override ability --- .../featuretemplates/functional_tests.jsonc | 4 +- .../vars/PreprocessEnvironment.ps1 | 13 ++++ .../vars/admz/common_vars_values.jsonc | 5 +- .../vars/common_vars_template.jsonnet | 30 ++++++++ .../vars/local/common_vars_values.jsonc | 5 +- .../vars/production/common_vars_values.jsonc | 5 +- .../vars/staging/common_vars_values.jsonc | 5 +- .../vars/uat/common_vars_values.jsonc | 5 +- .../pwshmodules/Deploy_0_Prep.psm1 | 72 ++++++++++--------- .../terraform_layer1/01-deploy.ps1 | 2 +- .../terraform_layer2/02-deploy.ps1 | 2 +- .../terraform_layer2/02-publish.ps1 | 2 +- .../terraform_layer3/03-deploy.ps1 | 2 +- .../terraform_layer3/03-publish.ps1 | 2 +- 14 files changed, 110 insertions(+), 44 deletions(-) diff --git a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc index a24a8970..6c3e9c71 100644 --- a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc +++ b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc @@ -5,7 +5,7 @@ {"Name":"deploy_app_service_plan","Value":true}, {"Name":"deploy_data_factory","Value":true}, {"Name":"deploy_sentinel","Value":true}, - {"Name":"deploy_purview","Value":false}, + {"Name":"deploy_purview","Value":true}, {"Name":"deploy_synapse","Value":true}, {"Name":"deploy_metadata_database","Value":true}, {"Name":"is_vnet_isolated","Value":true}, @@ -17,6 +17,6 @@ {"Name":"publish_datafactory_pipelines","Value":true}, {"Name":"publish_web_app_addcurrentuserasadmin","Value":true}, {"Name":"deploy_selfhostedsql","Value":true}, - {"Name":"is_onprem_datafactory_ir_registered","Value":false}, + {"Name":"is_onprem_datafactory_ir_registered","Value":true}, {"Name":"publish_sif_database","Value":true} ] \ No 
newline at end of file diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 64c5b6a9..148a6846 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -53,10 +53,23 @@ $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Na (jsonnet "./common_vars_template.jsonnet" --tla-str featuretemplatename=$FeatureTemplate --tla-str environment=$Environment --tla-str gitDeploy=$gitDeploy ) | Set-Content($newfolder +"/common_vars.json") $obj = Get-Content ($newfolder + "/common_vars.json") | ConvertFrom-Json + +#featureTemplateOverrides +$fto_vals = ((Get-Content -Path "./uat/common_vars_values.jsonc") | ConvertFrom-Json -Depth 10).FeatureTemplateOverrides +$fto_keys = $fto_vals | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"} + foreach($t in ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"})) { $Name = $t.Name $Value = $obj.ForEnvVar[0].$Name + + #Feature Template Value Overrides + if(($fto_keys | Where-Object {$_.Name -eq $Name.Replace("TF_VAR_","")}).count -gt 0) + { + $fto_prop = ($fto_keys | Where-Object {$_.Name -eq $Name.Replace("TF_VAR_","")}).Name + Write-Warning "Overriding Feature Template value for $fto_prop" + $Value = $fto_vals.$fto_prop + } if($Value.GetType().Name -eq "Boolean") { diff --git a/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc index 4ad56b08..85927813 100644 --- a/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc @@ -15,5 +15,8 @@ "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and 
provide access.. note that this will not be known until first deployment "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "FeatureTemplateOverrides": { + "is_onprem_datafactory_ir_registered":false + } } \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index d5d076d2..2d4d5bd9 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -638,6 +638,36 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc index 4ad56b08..85927813 100644 --- a/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc @@ -15,5 +15,8 @@ "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. 
note that this will not be known until first deployment - "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "FeatureTemplateOverrides": { + "is_onprem_datafactory_ir_registered":false + } } \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/production/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/production/common_vars_values.jsonc index 4ad56b08..f1ee91de 100644 --- a/solution/DeploymentV2/environments/vars/production/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/production/common_vars_values.jsonc @@ -15,5 +15,8 @@ "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl" //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment + "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl", //Required by deployment script (pwsh) to openfirewall and provide access.. 
note that this will not be known until first deployment, + "FeatureTemplateOverrides": { + "is_onprem_datafactory_ir_registered":false + } } \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 6c2970ba..73200918 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -38,5 +38,8 @@ "GIT_ADF_REPOSITORY_NAME": "#####", "GIT_ADF_REPOSITORY_BRANCH_NAME": "#####", "GIT_ADF_USER_NAME": "#####", - "GIT_ADF_EMAIL_ADDRESS": "#####" + "GIT_ADF_EMAIL_ADDRESS": "#####", + "FeatureTemplateOverrides": { + "is_onprem_datafactory_ir_registered":false + } } diff --git a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc index 1be1b5b4..b9132e69 100644 --- a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc @@ -38,5 +38,8 @@ "GIT_ADF_REPOSITORY_NAME": "#####", "GIT_ADF_REPOSITORY_BRANCH_NAME": "#####", "GIT_ADF_USER_NAME": "#####", - "GIT_ADF_EMAIL_ADDRESS": "#####" + "GIT_ADF_EMAIL_ADDRESS": "#####", + "FeatureTemplateOverrides": { + "is_onprem_datafactory_ir_registered":false + } } diff --git a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 index 6ee32a2b..01802078 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 @@ -1,14 +1,13 @@ function PrepareDeployment ( - [Parameter(Mandatory=$true)] - [System.Boolean]$gitDeploy=$false, - [Parameter(Mandatory=$true)] + [Parameter(Mandatory = $true)] + [System.Boolean]$gitDeploy = $false, + [Parameter(Mandatory = $true)] [String]$deploymentFolderPath, - 
[Parameter(Mandatory=$true)] - [String]$FeatureTemplate, - [Parameter(Mandatory=$false)] - [String]$PathToReturnTo="" -) -{ + [Parameter(Mandatory = $false)] + [String]$FeatureTemplate="", + [Parameter(Mandatory = $false)] + [String]$PathToReturnTo = "" +) { Set-Location $deploymentFolderPath #Check for SQLServer Module @@ -18,14 +17,13 @@ function PrepareDeployment ( } catch { "SqlServer PowerShell module not installed." } - if($null -eq $SqlInstalled) - { + if ($null -eq $SqlInstalled) { write-host "Installing SqlServer Module" Install-Module -Name SqlServer -Scope CurrentUser -Force } #needed for git integration - az extension add --upgrade --name datafactory + #az extension add --upgrade --name datafactory #accept custom image terms #https://docs.microsoft.com/en-us/cli/azure/vm/image/terms?view=azure-cli-latest @@ -34,18 +32,15 @@ function PrepareDeployment ( - if ($gitDeploy) - { + if ($gitDeploy) { $resourceGroupName = [System.Environment]::GetEnvironmentVariable('ARM_RESOURCE_GROUP_NAME') $synapseWorkspaceName = [System.Environment]::GetEnvironmentVariable('ARM_RESOURCE_SYNAPSE_WORKSPACE_NAME') $env:TF_VAR_ip_address = (Invoke-WebRequest ifconfig.me/ip).Content } - else - { + else { - #Only Prompt if Environment Variable has not been set - if ($null -eq [System.Environment]::GetEnvironmentVariable('environmentName')) - { + #Prompt if Environment Variable has not been set + if ($null -eq [System.Environment]::GetEnvironmentVariable('environmentName')) { $envlist = (Get-ChildItem -Directory -Path ./environments/vars | Select-Object -Property Name).Name Import-Module ./pwshmodules/GetSelectionFromUser.psm1 -Force $environmentName = Get-SelectionFromUser -Options ($envlist) -Prompt "Select deployment environment" @@ -54,14 +49,32 @@ function PrepareDeployment ( $env:TF_VAR_ip_address2 = (Invoke-WebRequest ifconfig.me/ip).Content + #Prompt if Feature Template has not been set + if ([string]::IsNullOrEmpty($FeatureTemplate) -eq $true) { + if 
([string]::IsNullOrEmpty($env:ARM_FEATURE_TEMPLATE) -eq $false) { + $FeatureTemplate = $env:ARM_FEATURE_TEMPLATE + } + else { + $fts = (Get-ChildItem -Path ./environments/featuretemplates | Select-Object -Property Name).Name.replace(".jsonc", "") + $templateName = Get-SelectionFromUser -Options ($fts) -Prompt "Select feature template" + if ($templateName -eq "Quit") { + Exit + } + else { + $FeatureTemplate = $templateName + $env:ARM_FEATURE_TEMPLATE = $FeatureTemplate + } + } + + } + } $environmentName = [System.Environment]::GetEnvironmentVariable('environmentName') - if ($environmentName -eq "Quit" -or [string]::IsNullOrEmpty($environmentName)) - { + if ($environmentName -eq "Quit" -or [string]::IsNullOrEmpty($environmentName)) { write-host "environmentName is currently: $environmentName" Write-Error "Environment is not set" Exit @@ -73,34 +86,29 @@ function PrepareDeployment ( ./PreprocessEnvironment.ps1 -Environment $environmentName -FeatureTemplate $FeatureTemplate -gitDeploy $gitDeploy Set-Location $deploymentFolderPath - [System.Environment]::SetEnvironmentVariable('TFenvironmentName',$environmentName) + [System.Environment]::SetEnvironmentVariable('TFenvironmentName', $environmentName) - try - { + try { $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address } - 
catch - { + catch { Write-Warning 'Opening Firewalls for IP Address One Failed' } - try - { + try { $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 $hiddenoutput = az synapse workspace firewall-rule create --name CICDUser --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address2 } - catch - { + catch { Write-Warning 'Opening Firewalls for IP Address Two Failed' } - if([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) - { + if ([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) { Write-Debug "Returning to $PathToReturnTo" Set-Location $PathToReturnTo } diff --git a/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 b/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 index ae0af689..a7641c99 100644 --- a/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 @@ -19,7 +19,7 @@ #---------------------------------------------------------------------------------------------------------------- param ( [Parameter(Mandatory=$false)] - [string]$FeatureTemplate="basic_deployment" + [string]$FeatureTemplate="" ) #------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 index 9ad883c6..ff7c6f1f 100644 --- a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 +++ 
b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 @@ -19,7 +19,7 @@ #---------------------------------------------------------------------------------------------------------------- param ( [Parameter(Mandatory=$false)] - [string]$FeatureTemplate="basic_deployment" + [string]$FeatureTemplate="" ) #------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/terraform_layer2/02-publish.ps1 b/solution/DeploymentV2/terraform_layer2/02-publish.ps1 index 0064c097..cdd37b07 100644 --- a/solution/DeploymentV2/terraform_layer2/02-publish.ps1 +++ b/solution/DeploymentV2/terraform_layer2/02-publish.ps1 @@ -19,7 +19,7 @@ #---------------------------------------------------------------------------------------------------------------- param ( [Parameter(Mandatory=$false)] - [string]$FeatureTemplate="basic_deployment" + [string]$FeatureTemplate="" ) #------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 index 95ce3224..6699588c 100644 --- a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 @@ -19,7 +19,7 @@ #---------------------------------------------------------------------------------------------------------------- param ( [Parameter(Mandatory=$false)] - [string]$FeatureTemplate="basic_deployment" + [string]$FeatureTemplate="" ) #------------------------------------------------------------------------------------------------------------ diff --git a/solution/DeploymentV2/terraform_layer3/03-publish.ps1 b/solution/DeploymentV2/terraform_layer3/03-publish.ps1 index d0da0bf9..c0894384 100644 --- a/solution/DeploymentV2/terraform_layer3/03-publish.ps1 +++ b/solution/DeploymentV2/terraform_layer3/03-publish.ps1 @@ -19,7 +19,7 @@ 
#---------------------------------------------------------------------------------------------------------------- param ( [Parameter(Mandatory=$false)] - [string]$FeatureTemplate="basic_deployment" + [string]$FeatureTemplate="" ) #------------------------------------------------------------------------------------------------------------ From f956e1e747d20b4d97697ca0ef1446e70bfb5448 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 6 Aug 2022 19:03:24 +0800 Subject: [PATCH 096/151] fixed name issue --- .../{Deploy_4_PrivateLinks.ps1 => Deploy_4_PrivateLinks.psm1} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename solution/DeploymentV2/pwshmodules/{Deploy_4_PrivateLinks.ps1 => Deploy_4_PrivateLinks.psm1} (100%) diff --git a/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.ps1 b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 similarity index 100% rename from solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.ps1 rename to solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 From 0f832504129b3cb3b4b58374989eabc82ea15d98 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 7 Aug 2022 08:36:21 +0800 Subject: [PATCH 097/151] Re-Org Dir --- README.md | 27 +++++++++++++++++++ cloc.md | 27 +++++++++---------- .../{ => utilities}/GitResetForCheckin.ps1 | 0 .../DeploymentV2/{ => utilities}/Test.ps1 | 0 .../{ => vmscripts}/DeployADFOnPremSHIR.ps1 | 0 .../InstallGatewayFunctions.ps1 | 0 .../RemoteInstallIntegrationRuntime.ps1 | 0 .../RemoteInstallSQLWithCDC.ps1 | 0 .../RemoteInstallSQLWithCDC.sh | 0 .../RemoteInstallSQLWithCDC_Script.ps1 | 0 10 files changed, 39 insertions(+), 15 deletions(-) rename solution/DeploymentV2/{ => utilities}/GitResetForCheckin.ps1 (100%) rename solution/DeploymentV2/{ => utilities}/Test.ps1 (100%) rename solution/DeploymentV2/{ => vmscripts}/DeployADFOnPremSHIR.ps1 (100%) rename solution/DeploymentV2/{ => vmscripts}/InstallGatewayFunctions.ps1 (100%) rename solution/DeploymentV2/{ => 
vmscripts}/RemoteInstallIntegrationRuntime.ps1 (100%) rename solution/DeploymentV2/{ => vmscripts}/RemoteInstallSQLWithCDC.ps1 (100%) rename solution/DeploymentV2/{ => vmscripts}/RemoteInstallSQLWithCDC.sh (100%) rename solution/DeploymentV2/{ => vmscripts}/RemoteInstallSQLWithCDC_Script.ps1 (100%) diff --git a/README.md b/README.md index 0b0e1899..31e9a3b2 100644 --- a/README.md +++ b/README.md @@ -73,6 +73,33 @@ cd ./solution/DeploymentV2 ``` >- :white_check_mark: You are now ready to begin the IAC deployment. To do so follow [this detailed guide.](./solution/DeploymentV2/README.md) + +## :hash: Code Composition +See below for the a "Cloc" generated breakdown of the source code files by format: +Language|# Files|Blank Lines|Comment Lines|Code Lines +:-------|-------:|-------:|-------:|-------: +JSON|508|52|0|367628 +YAML|20|4140|4215|186305 +SQL|180|2058|2098|38799 +C#|240|3910|1461|19565 +Razor|322|1757|268|18142 +CSS|4|2117|42|9440 +HCL|97|1090|581|9248 +Jupyter Notebook|15|0|1937|3755 +PowerShell|70|858|704|3568 +JavaScript|12|268|218|1288 +Markdown|40|235|0|994 +SVG|7|0|18|657 +MSBuild script|8|60|2|634 +Bourne Shell|3|62|72|423 +Python|2|14|64|55 +Dockerfile|1|6|9|34 +DOS Batch|1|4|3|1 +HTML|1|1|0|0 +--------|--------|--------|--------|-------- +SUM:|1531|16632|11692|660536 + + ## Post Deployment Set-up and Instructions ======= Coming Soon. 
diff --git a/cloc.md b/cloc.md index dea003cd..179ce510 100644 --- a/cloc.md +++ b/cloc.md @@ -1,28 +1,25 @@ -cloc|github.com/AlDanial/cloc v 1.82 T=12.07 s (299.1 files/s, 91359.7 lines/s) +cloc|github.com/AlDanial/cloc v 1.82 T=0.89 s (1714.7 files/s, 771497.5 lines/s) --- | --- Language|files|blank|comment|code :-------|-------:|-------:|-------:|-------: -JSON|772|58|0|541329 -YAML|22|4145|4217|186336 -JavaScript|1455|22426|21559|118459 -SQL|188|2090|2098|59893 -CSS|47|7877|447|33617 -C#|248|3934|1517|19616 +JSON|508|52|0|367628 +YAML|20|4140|4215|186305 +SQL|180|2058|2098|38799 +C#|240|3910|1461|19565 Razor|322|1757|268|18142 -SVG|197|0|42|15098 -HCL|101|1318|581|13006 -Sass|90|1333|1138|5545 -PowerShell|74|897|728|3761 +CSS|4|2117|42|9440 +HCL|97|1090|581|9248 Jupyter Notebook|15|0|1937|3755 -Markdown|60|352|0|1546 +PowerShell|70|858|704|3568 +JavaScript|12|268|218|1288 +Markdown|40|235|0|994 +SVG|7|0|18|657 MSBuild script|8|60|2|634 Bourne Shell|3|62|72|423 -Go|1|8|5|73 Python|2|14|64|55 Dockerfile|1|6|9|34 -make|1|6|0|22 DOS Batch|1|4|3|1 HTML|1|1|0|0 --------|--------|--------|--------|-------- -SUM:|3609|46348|34687|1021345 +SUM:|1531|16632|11692|660536 diff --git a/solution/DeploymentV2/GitResetForCheckin.ps1 b/solution/DeploymentV2/utilities/GitResetForCheckin.ps1 similarity index 100% rename from solution/DeploymentV2/GitResetForCheckin.ps1 rename to solution/DeploymentV2/utilities/GitResetForCheckin.ps1 diff --git a/solution/DeploymentV2/Test.ps1 b/solution/DeploymentV2/utilities/Test.ps1 similarity index 100% rename from solution/DeploymentV2/Test.ps1 rename to solution/DeploymentV2/utilities/Test.ps1 diff --git a/solution/DeploymentV2/DeployADFOnPremSHIR.ps1 b/solution/DeploymentV2/vmscripts/DeployADFOnPremSHIR.ps1 similarity index 100% rename from solution/DeploymentV2/DeployADFOnPremSHIR.ps1 rename to solution/DeploymentV2/vmscripts/DeployADFOnPremSHIR.ps1 diff --git a/solution/DeploymentV2/InstallGatewayFunctions.ps1 
b/solution/DeploymentV2/vmscripts/InstallGatewayFunctions.ps1 similarity index 100% rename from solution/DeploymentV2/InstallGatewayFunctions.ps1 rename to solution/DeploymentV2/vmscripts/InstallGatewayFunctions.ps1 diff --git a/solution/DeploymentV2/RemoteInstallIntegrationRuntime.ps1 b/solution/DeploymentV2/vmscripts/RemoteInstallIntegrationRuntime.ps1 similarity index 100% rename from solution/DeploymentV2/RemoteInstallIntegrationRuntime.ps1 rename to solution/DeploymentV2/vmscripts/RemoteInstallIntegrationRuntime.ps1 diff --git a/solution/DeploymentV2/RemoteInstallSQLWithCDC.ps1 b/solution/DeploymentV2/vmscripts/RemoteInstallSQLWithCDC.ps1 similarity index 100% rename from solution/DeploymentV2/RemoteInstallSQLWithCDC.ps1 rename to solution/DeploymentV2/vmscripts/RemoteInstallSQLWithCDC.ps1 diff --git a/solution/DeploymentV2/RemoteInstallSQLWithCDC.sh b/solution/DeploymentV2/vmscripts/RemoteInstallSQLWithCDC.sh similarity index 100% rename from solution/DeploymentV2/RemoteInstallSQLWithCDC.sh rename to solution/DeploymentV2/vmscripts/RemoteInstallSQLWithCDC.sh diff --git a/solution/DeploymentV2/RemoteInstallSQLWithCDC_Script.ps1 b/solution/DeploymentV2/vmscripts/RemoteInstallSQLWithCDC_Script.ps1 similarity index 100% rename from solution/DeploymentV2/RemoteInstallSQLWithCDC_Script.ps1 rename to solution/DeploymentV2/vmscripts/RemoteInstallSQLWithCDC_Script.ps1 From 7e5c2d294df92a8429f28682d62483dfb14a081e Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 7 Aug 2022 10:04:25 +0800 Subject: [PATCH 098/151] Fixed Synapse & DataFactory to use single GatherOutputs from TF. 
--- .../DataFactory/Patterns/AdfOnlyTests_Run.ps1 | 5 +- .../DataFactory/Patterns/EnvVarsToFile.ps1 | 6 +- .../DataFactory/Patterns/FuncAppTests_Run.ps1 | 4 +- .../Patterns/GatherOutputsFromTerraform.psm1 | 30 ---- ...utputsFromTerraform_DataFactoryFolder.psm1 | 13 ++ .../Patterns/Jsonnet_GenerateADFArtefacts.ps1 | 7 +- .../DataFactory/Patterns/SqlTests_Upload.ps1 | 7 +- .../Patterns/UploadGeneratedPatternsToADF.ps1 | 5 +- .../Patterns/UploadGeneratedPatternstoGit.ps1 | 5 +- .../Patterns/UploadTaskTypeMappings.ps1 | 6 +- .../vars/common_vars_template.jsonnet | 16 ++ solution/Synapse/Patterns/EnvVarsToFile.ps1 | 4 +- .../Patterns/FuncAppTests_Generate.ps1 | 6 +- .../Patterns/GatherOutputsFromTerraform.psm1 | 31 ---- ...herOutputsFromTerraform_SynapseFolder.psm1 | 13 ++ .../Patterns/Jsonnet_GenerateADFArtefacts.ps1 | 4 +- solution/Synapse/Patterns/SqlTests_Upload.ps1 | 6 +- .../Patterns/UploadGeneratedPatternsToADF.ps1 | 5 +- .../Patterns/UploadGeneratedPatternstoGit.ps1 | 4 +- .../Patterns/UploadTaskTypeMappings.ps1 | 5 +- .../functionapptests/tests/tests.json | 140 +++++++++--------- .../functionapptests/tests/tests.json | 10 +- .../functionapptests/tests/tests.json | 40 ++--- .../functionapptests/tests/tests.json | 24 +-- solution/Synapse/Patterns/uploadNotebooks.ps1 | 4 +- 25 files changed, 199 insertions(+), 201 deletions(-) delete mode 100644 solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 create mode 100644 solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 delete mode 100644 solution/Synapse/Patterns/GatherOutputsFromTerraform.psm1 create mode 100644 solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 diff --git a/solution/DataFactory/Patterns/AdfOnlyTests_Run.ps1 b/solution/DataFactory/Patterns/AdfOnlyTests_Run.ps1 index 70b3820e..d9e0adfd 100644 --- a/solution/DataFactory/Patterns/AdfOnlyTests_Run.ps1 +++ b/solution/DataFactory/Patterns/AdfOnlyTests_Run.ps1 @@ -1,6 +1,7 @@ az config set 
extension.use_dynamic_install=yes_without_prompt -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform + +Import-Module ./GatherOutputsFromTerraform_DataFactoryFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_DataFactoryFolder if($tout.datafactory_name -eq "") { $tout.datafactory_name = Read-Host "Enter the name of the data factory" diff --git a/solution/DataFactory/Patterns/EnvVarsToFile.ps1 b/solution/DataFactory/Patterns/EnvVarsToFile.ps1 index 9584899f..a91910b1 100644 --- a/solution/DataFactory/Patterns/EnvVarsToFile.ps1 +++ b/solution/DataFactory/Patterns/EnvVarsToFile.ps1 @@ -12,6 +12,8 @@ $obj | Add-Member "AdsOpts_CD_Services_DataFactory_SubscriptionId" $Subscription $obj | ConvertTo-Json | Set-Content ('./pipeline/static/partials/secrets.libsonnet') #> -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform + +Write-Host $PWD.Path +Import-Module ./GatherOutputsFromTerraform_DataFactoryFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_DataFactoryFolder $tout | ConvertTo-Json -Depth 10| Set-Content "./pipeline/static/partials/secrets.libsonnet" \ No newline at end of file diff --git a/solution/DataFactory/Patterns/FuncAppTests_Run.ps1 b/solution/DataFactory/Patterns/FuncAppTests_Run.ps1 index 9617006f..e903d728 100644 --- a/solution/DataFactory/Patterns/FuncAppTests_Run.ps1 +++ b/solution/DataFactory/Patterns/FuncAppTests_Run.ps1 @@ -1,5 +1,5 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform +Import-Module ./GatherOutputsFromTerraform_DataFactoryFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_DataFactoryFolder if($tout.datafactory_name -eq "") { diff --git a/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 b/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 deleted file mode 100644 index dac9d0b2..00000000 --- a/solution/DataFactory/Patterns/GatherOutputsFromTerraform.psm1 +++ /dev/null @@ -1,30 
+0,0 @@ -function GatherOutputsFromTerraform() -{ - $environmentName = $env:TFenvironmentName - $myIp = (Invoke-WebRequest ifconfig.me/ip).Content - - $CurrentFolderPath = $PWD - Set-Location "../../DeploymentV2/terraform_layer2" - $env:TF_VAR_ip_address = $myIp - - #------------------------------------------------------------------------------------------------------------ - # Get all the outputs from terraform so we can use them in subsequent steps - #------------------------------------------------------------------------------------------------------------ - Write-Verbose "-------------------------------------------------------------------------------------------------" - Write-Verbose "Reading Terraform Outputs - Started" - - $tout = New-Object PSObject - - $tout0 = (terraform output -json | ConvertFrom-Json -Depth 10).PSObject.Properties - $tout0 | Foreach-Object { - $tout | Add-Member -MemberType NoteProperty -Name $_.Name -Value $_.Value.value - } - - $rgid = (az group show -n $tout.resource_group_name | ConvertFrom-Json -Depth 10).id - $tout | Add-Member -MemberType NoteProperty -Name "resource_group_id" -Value $rgid - - Set-Location $CurrentFolderPath - Write-Verbose "Reading Terraform Outputs - Finished" - Write-Verbose "-------------------------------------------------------------------------------------------------" - return $tout -} \ No newline at end of file diff --git a/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 b/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 new file mode 100644 index 00000000..a7b23efb --- /dev/null +++ b/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 @@ -0,0 +1,13 @@ +function GatherOutputsFromTerraform_DataFactoryFolder() +{ + $PathToReturnTo = (Get-Location).Path + $DeploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../../DeploymentV2/') + $TerraformFolderPath = Convert-Path -Path 
($DeploymentFolderPath + '/terraform_layer2/') + + Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/GatherOutputsFromTerraform.psm1")) -Force + Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/Deploy_0_Prep.psm1")) -Force + PrepareDeployment -gitDeploy $false -deploymentFolderPath $deploymentFolderPath -FeatureTemplate "" -PathToReturnTo $PathToReturnTo + + $tout = GatherOutputsFromTerraform -TerraformFolderPath $TerraformFolderPath + return $tout +} diff --git a/solution/DataFactory/Patterns/Jsonnet_GenerateADFArtefacts.ps1 b/solution/DataFactory/Patterns/Jsonnet_GenerateADFArtefacts.ps1 index 0dba7317..d48b1738 100644 --- a/solution/DataFactory/Patterns/Jsonnet_GenerateADFArtefacts.ps1 +++ b/solution/DataFactory/Patterns/Jsonnet_GenerateADFArtefacts.ps1 @@ -1,5 +1,8 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform + + +Import-Module ./GatherOutputsFromTerraform_DataFactoryFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_DataFactoryFolder + $newfolder = "./output/" diff --git a/solution/DataFactory/Patterns/SqlTests_Upload.ps1 b/solution/DataFactory/Patterns/SqlTests_Upload.ps1 index 0dc31be9..ca745edb 100644 --- a/solution/DataFactory/Patterns/SqlTests_Upload.ps1 +++ b/solution/DataFactory/Patterns/SqlTests_Upload.ps1 @@ -1,5 +1,8 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform + +Write-Host $PWD +Import-Module ./GatherOutputsFromTerraform_DataFactoryFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_DataFactoryFolder + $sqlserver_name=$tout.sqlserver_name $stagingdb_name=$tout.stagingdb_name $metadatadb_name=$tout.metadatadb_name diff --git a/solution/DataFactory/Patterns/UploadGeneratedPatternsToADF.ps1 b/solution/DataFactory/Patterns/UploadGeneratedPatternsToADF.ps1 index eb94fb51..96daefb7 100644 --- a/solution/DataFactory/Patterns/UploadGeneratedPatternsToADF.ps1 +++ 
b/solution/DataFactory/Patterns/UploadGeneratedPatternsToADF.ps1 @@ -1,5 +1,6 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform + +Import-Module ./GatherOutputsFromTerraform_DataFactoryFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_DataFactoryFolder if($tout.datafactory_name -eq "") { diff --git a/solution/DataFactory/Patterns/UploadGeneratedPatternstoGit.ps1 b/solution/DataFactory/Patterns/UploadGeneratedPatternstoGit.ps1 index 181ad853..20df500e 100644 --- a/solution/DataFactory/Patterns/UploadGeneratedPatternstoGit.ps1 +++ b/solution/DataFactory/Patterns/UploadGeneratedPatternstoGit.ps1 @@ -1,5 +1,6 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform + +Import-Module ./GatherOutputsFromTerraform_DataFactoryFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_DataFactoryFolder function RemoveRepetitiveChars ($string, $char) { $string = $string.Split($char).where{$_} -join $char diff --git a/solution/DataFactory/Patterns/UploadTaskTypeMappings.ps1 b/solution/DataFactory/Patterns/UploadTaskTypeMappings.ps1 index 78d45864..2573c774 100644 --- a/solution/DataFactory/Patterns/UploadTaskTypeMappings.ps1 +++ b/solution/DataFactory/Patterns/UploadTaskTypeMappings.ps1 @@ -1,5 +1,7 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform + +Import-Module ./GatherOutputsFromTerraform_DataFactoryFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_DataFactoryFolder + $sqlserver_name=$tout.sqlserver_name $stagingdb_name=$tout.stagingdb_name $metadatadb_name=$tout.metadatadb_name diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 2d4d5bd9..49216c47 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -671,6 +671,22 @@ local 
SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + + + + + + + + + diff --git a/solution/Synapse/Patterns/EnvVarsToFile.ps1 b/solution/Synapse/Patterns/EnvVarsToFile.ps1 index 9584899f..d3b57344 100644 --- a/solution/Synapse/Patterns/EnvVarsToFile.ps1 +++ b/solution/Synapse/Patterns/EnvVarsToFile.ps1 @@ -12,6 +12,6 @@ $obj | Add-Member "AdsOpts_CD_Services_DataFactory_SubscriptionId" $Subscription $obj | ConvertTo-Json | Set-Content ('./pipeline/static/partials/secrets.libsonnet') #> -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform +Import-Module ./GatherOutputsFromTerraform_SynapseFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_SynapseFolder $tout | ConvertTo-Json -Depth 10| Set-Content "./pipeline/static/partials/secrets.libsonnet" \ No newline at end of file diff --git a/solution/Synapse/Patterns/FuncAppTests_Generate.ps1 b/solution/Synapse/Patterns/FuncAppTests_Generate.ps1 index acc5996d..6cfb6bc4 100644 --- a/solution/Synapse/Patterns/FuncAppTests_Generate.ps1 +++ b/solution/Synapse/Patterns/FuncAppTests_Generate.ps1 @@ -1,5 +1,7 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform + + +Import-Module ./GatherOutputsFromTerraform_SynapseFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_SynapseFolder $patterns = ((Get-Content "Patterns.json") | ConvertFrom-Json).Folder | Sort-Object | Get-Unique $CurDir = $PWD.ToString() diff --git a/solution/Synapse/Patterns/GatherOutputsFromTerraform.psm1 b/solution/Synapse/Patterns/GatherOutputsFromTerraform.psm1 deleted file mode 100644 index 50d57f9f..00000000 --- a/solution/Synapse/Patterns/GatherOutputsFromTerraform.psm1 +++ /dev/null @@ -1,31 +0,0 @@ -function GatherOutputsFromTerraform() -{ - $environmentName = $env:TFenvironmentName - #$environmentName = "local" # currently supports (local, staging) - $myIp = (Invoke-WebRequest ifconfig.me/ip).Content - - $CurrentFolderPath = $PWD - Set-Location 
"../../DeploymentV2/terraform_layer2" - $env:TF_VAR_ip_address = $myIp - - #------------------------------------------------------------------------------------------------------------ - # Get all the outputs from terraform so we can use them in subsequent steps - #------------------------------------------------------------------------------------------------------------ - Write-Information "-------------------------------------------------------------------------------------------------" - Write-Information "Reading Terraform Outputs - Started" - - $tout = New-Object PSObject - - $tout0 = (terraform output -json | ConvertFrom-Json -Depth 10).PSObject.Properties - $tout0 | Foreach-Object { - $tout | Add-Member -MemberType NoteProperty -Name $_.Name -Value $_.Value.value - } - - $rgid = (az group show -n $tout.resource_group_name | ConvertFrom-Json -Depth 10).id - $tout | Add-Member -MemberType NoteProperty -Name "resource_group_id" -Value $rgid - - Set-Location $CurrentFolderPath - Write-Information "Reading Terraform Outputs - Finished" - Write-Information "-------------------------------------------------------------------------------------------------" - return $tout -} \ No newline at end of file diff --git a/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 b/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 new file mode 100644 index 00000000..717b7b58 --- /dev/null +++ b/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 @@ -0,0 +1,13 @@ +function GatherOutputsFromTerraform_SynapseFolder() +{ + $PathToReturnTo = (Get-Location).Path + $DeploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../../DeploymentV2/') + $TerraformFolderPath = Convert-Path -Path ($DeploymentFolderPath + '/terraform_layer2/') + + Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/GatherOutputsFromTerraform.psm1")) -Force + Import-Module (Convert-Path -Path ($DeploymentFolderPath 
+ "/pwshmodules/Deploy_0_Prep.psm1")) -Force + PrepareDeployment -gitDeploy $false -deploymentFolderPath $deploymentFolderPath -FeatureTemplate "" -PathToReturnTo $PathToReturnTo + + $tout = GatherOutputsFromTerraform -TerraformFolderPath $TerraformFolderPath + return $tout +} \ No newline at end of file diff --git a/solution/Synapse/Patterns/Jsonnet_GenerateADFArtefacts.ps1 b/solution/Synapse/Patterns/Jsonnet_GenerateADFArtefacts.ps1 index 8fd48172..97093645 100644 --- a/solution/Synapse/Patterns/Jsonnet_GenerateADFArtefacts.ps1 +++ b/solution/Synapse/Patterns/Jsonnet_GenerateADFArtefacts.ps1 @@ -1,5 +1,5 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform +Import-Module ./GatherOutputsFromTerraform_SynapseFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_SynapseFolder $newfolder = "./output/" diff --git a/solution/Synapse/Patterns/SqlTests_Upload.ps1 b/solution/Synapse/Patterns/SqlTests_Upload.ps1 index 92ec8564..5c0fbb0f 100644 --- a/solution/Synapse/Patterns/SqlTests_Upload.ps1 +++ b/solution/Synapse/Patterns/SqlTests_Upload.ps1 @@ -1,5 +1,7 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform + +Import-Module ./GatherOutputsFromTerraform_SynapseFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_SynapseFolder + $sqlserver_name=$tout.sqlserver_name $stagingdb_name=$tout.stagingdb_name $metadatadb_name=$tout.metadatadb_name diff --git a/solution/Synapse/Patterns/UploadGeneratedPatternsToADF.ps1 b/solution/Synapse/Patterns/UploadGeneratedPatternsToADF.ps1 index 7a156a6a..61977056 100644 --- a/solution/Synapse/Patterns/UploadGeneratedPatternsToADF.ps1 +++ b/solution/Synapse/Patterns/UploadGeneratedPatternsToADF.ps1 @@ -1,6 +1,5 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform - +Import-Module ./GatherOutputsFromTerraform_SynapseFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_SynapseFolder if($tout.datafactory_name -eq 
"") { $tout.datafactory_name = Read-Host "Enter the name of the data factory" diff --git a/solution/Synapse/Patterns/UploadGeneratedPatternstoGit.ps1 b/solution/Synapse/Patterns/UploadGeneratedPatternstoGit.ps1 index e3c0d102..1b419aa7 100644 --- a/solution/Synapse/Patterns/UploadGeneratedPatternstoGit.ps1 +++ b/solution/Synapse/Patterns/UploadGeneratedPatternstoGit.ps1 @@ -1,5 +1,5 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform +Import-Module ./GatherOutputsFromTerraform_SynapseFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_SynapseFolder function RemoveRepetitiveChars ($string, $char) { $string = $string.Split($char).where{$_} -join $char diff --git a/solution/Synapse/Patterns/UploadTaskTypeMappings.ps1 b/solution/Synapse/Patterns/UploadTaskTypeMappings.ps1 index dbf03a9b..8a9617f2 100644 --- a/solution/Synapse/Patterns/UploadTaskTypeMappings.ps1 +++ b/solution/Synapse/Patterns/UploadTaskTypeMappings.ps1 @@ -1,5 +1,6 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform +Import-Module ./GatherOutputsFromTerraform_SynapseFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_SynapseFolder + $sqlserver_name=$tout.sqlserver_name $stagingdb_name=$tout.stagingdb_name $metadatadb_name=$tout.metadatadb_name diff --git a/solution/Synapse/Patterns/pipeline/Azure-Storage-to-Azure-Storage/functionapptests/tests/tests.json b/solution/Synapse/Patterns/pipeline/Azure-Storage-to-Azure-Storage/functionapptests/tests/tests.json index aeaa3934..2ecd107e 100644 --- a/solution/Synapse/Patterns/pipeline/Azure-Storage-to-Azure-Storage/functionapptests/tests/tests.json +++ b/solution/Synapse/Patterns/pipeline/Azure-Storage-to-Azure-Storage/functionapptests/tests/tests.json @@ -3,27 +3,27 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "DeltaLoad", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": 
\"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -4, "SourceSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "SourceSystemSecretName": "", - "SourceSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "SourceSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "SourceSystemType": "ADLS", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -4, "TargetSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "TargetSystemSecretName": "", - "TargetSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "TargetSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "TargetSystemType": "ADLS", "TargetSystemUserName": "", "TaskDatafactoryIR": "Azure", @@ -43,27 +43,27 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "DeltaLoad", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + 
"EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -4, "SourceSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "SourceSystemSecretName": "", - "SourceSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "SourceSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "SourceSystemType": "ADLS", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -4, "TargetSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "TargetSystemSecretName": "", - "TargetSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "TargetSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "TargetSystemType": "ADLS", "TargetSystemUserName": "", "TaskDatafactoryIR": "Azure", @@ -83,27 +83,27 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "DeltaLoad", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": 
"https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -4, "SourceSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "SourceSystemSecretName": "", - "SourceSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "SourceSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "SourceSystemType": "ADLS", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -4, "TargetSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "TargetSystemSecretName": "", - "TargetSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "TargetSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "TargetSystemType": "ADLS", "TargetSystemUserName": "", "TaskDatafactoryIR": "Azure", @@ -123,27 +123,27 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "DeltaLoad", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -4, "SourceSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", 
"SourceSystemSecretName": "", - "SourceSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "SourceSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "SourceSystemType": "ADLS", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -4, "TargetSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "TargetSystemSecretName": "", - "TargetSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "TargetSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "TargetSystemType": "ADLS", "TargetSystemUserName": "", "TaskDatafactoryIR": "Azure", @@ -163,27 +163,27 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "DeltaLoad", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -4, "SourceSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "SourceSystemSecretName": "", - "SourceSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "SourceSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "SourceSystemType": "ADLS", "SourceSystemUserName": "", 
"SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -4, "TargetSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "TargetSystemSecretName": "", - "TargetSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "TargetSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "TargetSystemType": "ADLS", "TargetSystemUserName": "", "TaskDatafactoryIR": "Azure", @@ -203,27 +203,27 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "DeltaLoad", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -4, "SourceSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "SourceSystemSecretName": "", - "SourceSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "SourceSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "SourceSystemType": "ADLS", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", 
"TargetSystemId": -4, "TargetSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "TargetSystemSecretName": "", - "TargetSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "TargetSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "TargetSystemType": "ADLS", "TargetSystemUserName": "", "TaskDatafactoryIR": "Azure", @@ -243,27 +243,27 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "DeltaLoad", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -4, "SourceSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "SourceSystemSecretName": "", - "SourceSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "SourceSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "SourceSystemType": "ADLS", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -4, "TargetSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "TargetSystemSecretName": "", - "TargetSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "TargetSystemServer": 
"https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "TargetSystemType": "ADLS", "TargetSystemUserName": "", "TaskDatafactoryIR": "Azure", @@ -283,27 +283,27 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "DeltaLoad", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -4, "SourceSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "SourceSystemSecretName": "", - "SourceSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "SourceSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "SourceSystemType": "ADLS", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -4, "TargetSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "TargetSystemSecretName": "", - "TargetSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "TargetSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "TargetSystemType": "ADLS", "TargetSystemUserName": "", "TaskDatafactoryIR": "Azure", @@ -323,27 +323,27 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "DeltaLoad", "EngineId": -2, - 
"EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -4, "SourceSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "SourceSystemSecretName": "", - "SourceSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "SourceSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "SourceSystemType": "ADLS", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -4, "TargetSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "TargetSystemSecretName": "", - "TargetSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "TargetSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "TargetSystemType": "ADLS", "TargetSystemUserName": "", "TaskDatafactoryIR": "Azure", @@ -363,27 +363,27 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "DeltaLoad", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": 
\"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -4, "SourceSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "SourceSystemSecretName": "", - "SourceSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "SourceSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "SourceSystemType": "ADLS", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -4, "TargetSystemJSON": "{\n \"Container\": \"datalakeraw\"\n}", "TargetSystemSecretName": "", - "TargetSystemServer": "https://adsdevdlsadswe7yadsl.dfs.core.windows.net", + "TargetSystemServer": "https://adsstgdlsadsnvmzadsl.dfs.core.windows.net", "TargetSystemType": "ADLS", "TargetSystemUserName": "", "TaskDatafactoryIR": "Azure", diff --git a/solution/Synapse/Patterns/pipeline/Execute-Notebook/functionapptests/tests/tests.json b/solution/Synapse/Patterns/pipeline/Execute-Notebook/functionapptests/tests/tests.json index 1b978679..07423a75 100644 --- a/solution/Synapse/Patterns/pipeline/Execute-Notebook/functionapptests/tests/tests.json +++ b/solution/Synapse/Patterns/pipeline/Execute-Notebook/functionapptests/tests/tests.json @@ -3,13 +3,13 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", 
\"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -16, "SourceSystemJSON": "{\n\n}", @@ -18,7 +18,7 @@ "SourceSystemType": "N/A", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -16, "TargetSystemJSON": "{\n\n}", diff --git a/solution/Synapse/Patterns/pipeline/Rest-API-to-Azure-Storage/functionapptests/tests/tests.json b/solution/Synapse/Patterns/pipeline/Rest-API-to-Azure-Storage/functionapptests/tests/tests.json index 95db26d6..abdb7f9c 100644 --- a/solution/Synapse/Patterns/pipeline/Rest-API-to-Azure-Storage/functionapptests/tests/tests.json +++ b/solution/Synapse/Patterns/pipeline/Rest-API-to-Azure-Storage/functionapptests/tests/tests.json @@ -3,13 +3,13 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + 
"EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -17, "SourceSystemJSON": "{\n\n}", @@ -18,7 +18,7 @@ "SourceSystemType": "Rest", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -8, "TargetSystemJSON": "{\n\n}", @@ -43,13 +43,13 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -17, "SourceSystemJSON": "{\n\n}", @@ -58,7 +58,7 @@ "SourceSystemType": "Rest", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -8, "TargetSystemJSON": "{\n\n}", @@ -83,13 +83,13 
@@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -17, "SourceSystemJSON": "{\n\n}", @@ -98,7 +98,7 @@ "SourceSystemType": "Rest", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -8, "TargetSystemJSON": "{\n\n}", @@ -123,13 +123,13 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": 
"https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -17, "SourceSystemJSON": "{\n\n}", @@ -138,7 +138,7 @@ "SourceSystemType": "Rest", "SourceSystemUserName": "", "SynapsePipeline": "GPL_SparkNotebookExecution_Primary_Azure", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -8, "TargetSystemJSON": "{\n\n}", diff --git a/solution/Synapse/Patterns/pipeline/Synapse-SQLPool-Start-Stop/functionapptests/tests/tests.json b/solution/Synapse/Patterns/pipeline/Synapse-SQLPool-Start-Stop/functionapptests/tests/tests.json index 939b6b01..13ff1f60 100644 --- a/solution/Synapse/Patterns/pipeline/Synapse-SQLPool-Start-Stop/functionapptests/tests/tests.json +++ b/solution/Synapse/Patterns/pipeline/Synapse-SQLPool-Start-Stop/functionapptests/tests/tests.json @@ -3,13 +3,13 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + "EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -16, "SourceSystemJSON": "{\n\n}", @@ -18,7 +18,7 @@ "SourceSystemType": "N/A", "SourceSystemUserName": "", "SynapsePipeline": "Not-Applicable", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + 
"TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -10, "TargetSystemJSON": "{\n\n}", @@ -34,7 +34,7 @@ "TaskInstanceId": -1, "TaskInstanceJson": "{\n\n}", "TaskMasterId": -1015, - "TaskMasterJson": "{\n \"SQLPoolName\": \"adsdevsyndpads\",\n \"SQLPoolOperation\": \"start\",\n \"Source\": {\n \"DataFileName\": \"\",\n \"DeleteAfterCompletion\": \"false\",\n \"MaxConcurrentConnections\": 0,\n \"Recursively\": \"false\",\n \"RelativePath\": \"\",\n \"SchemaFileName\": \"\",\n \"Type\": \"Not-Applicable\"\n },\n \"Target\": {\n \"DataFileName\": \"\",\n \"DeleteAfterCompletion\": \"false\",\n \"MaxConcurrentConnections\": 0,\n \"Recursively\": \"false\",\n \"RelativePath\": \"\",\n \"SchemaFileName\": \"\",\n \"Type\": \"Not-Applicable\"\n }\n}", + "TaskMasterJson": "{\n \"SQLPoolName\": \"adsstgsyndpads\",\n \"SQLPoolOperation\": \"start\",\n \"Source\": {\n \"DataFileName\": \"\",\n \"DeleteAfterCompletion\": \"false\",\n \"MaxConcurrentConnections\": 0,\n \"Recursively\": \"false\",\n \"RelativePath\": \"\",\n \"SchemaFileName\": \"\",\n \"Type\": \"Not-Applicable\"\n },\n \"Target\": {\n \"DataFileName\": \"\",\n \"DeleteAfterCompletion\": \"false\",\n \"MaxConcurrentConnections\": 0,\n \"Recursively\": \"false\",\n \"RelativePath\": \"\",\n \"SchemaFileName\": \"\",\n \"Type\": \"Not-Applicable\"\n }\n}", "TaskStatus": "Untried", "TaskType": "Synapse-SQLPool-Start-Stop", "TaskTypeId": -6, @@ -44,13 +44,13 @@ "DegreeOfCopyParallelism": 1, "DependencyChainTag": "", "EngineId": -2, - "EngineJson": "{\"endpoint\": \"https://adsdevsynwadswe7y.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", - "EngineName": "ads-dev-adf-ads-we7y", - "EngineResourceGroup": "gfd1", + "EngineJson": "{\"endpoint\": \"https://adsstgsynwadsnvmz.dev.azuresynapse.net\", \"DeltaProcessingNotebook\": \"DeltaProcessingNotebook\"}", + "EngineName": "ads-stg-adf-ads-nvmz", + 
"EngineResourceGroup": "gfuat", "EngineSubscriptionId": "035a1364-f00d-48e2-b582-4fe125905ee3", "NumberOfRetries": 3, "ScheduleMasterId": "-2", - "SourceKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "SourceKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "SourceSystemAuthType": "MSI", "SourceSystemId": -16, "SourceSystemJSON": "{\n\n}", @@ -59,7 +59,7 @@ "SourceSystemType": "N/A", "SourceSystemUserName": "", "SynapsePipeline": "Not-Applicable", - "TargetKeyVaultBaseUrl": "https://ads-dev-kv-ads-we7y.vault.azure.net", + "TargetKeyVaultBaseUrl": "https://ads-stg-kv-ads-nvmz.vault.azure.net", "TargetSystemAuthType": "MSI", "TargetSystemId": -10, "TargetSystemJSON": "{\n\n}", @@ -75,7 +75,7 @@ "TaskInstanceId": -1, "TaskInstanceJson": "{\n\n}", "TaskMasterId": -1016, - "TaskMasterJson": "{\n \"SQLPoolName\": \"adsdevsyndpads\",\n \"SQLPoolOperation\": \"pause\",\n \"Source\": {\n \"DataFileName\": \"\",\n \"DeleteAfterCompletion\": \"false\",\n \"MaxConcurrentConnections\": 0,\n \"Recursively\": \"false\",\n \"RelativePath\": \"\",\n \"SchemaFileName\": \"\",\n \"Type\": \"Not-Applicable\"\n },\n \"Target\": {\n \"DataFileName\": \"\",\n \"DeleteAfterCompletion\": \"false\",\n \"MaxConcurrentConnections\": 0,\n \"Recursively\": \"false\",\n \"RelativePath\": \"\",\n \"SchemaFileName\": \"\",\n \"Type\": \"Not-Applicable\"\n }\n}", + "TaskMasterJson": "{\n \"SQLPoolName\": \"adsstgsyndpads\",\n \"SQLPoolOperation\": \"pause\",\n \"Source\": {\n \"DataFileName\": \"\",\n \"DeleteAfterCompletion\": \"false\",\n \"MaxConcurrentConnections\": 0,\n \"Recursively\": \"false\",\n \"RelativePath\": \"\",\n \"SchemaFileName\": \"\",\n \"Type\": \"Not-Applicable\"\n },\n \"Target\": {\n \"DataFileName\": \"\",\n \"DeleteAfterCompletion\": \"false\",\n \"MaxConcurrentConnections\": 0,\n \"Recursively\": \"false\",\n \"RelativePath\": \"\",\n \"SchemaFileName\": \"\",\n \"Type\": \"Not-Applicable\"\n }\n}", "TaskStatus": "Untried", "TaskType": 
"Synapse-SQLPool-Start-Stop", "TaskTypeId": -6, diff --git a/solution/Synapse/Patterns/uploadNotebooks.ps1 b/solution/Synapse/Patterns/uploadNotebooks.ps1 index dbb2a928..23c3aa5a 100644 --- a/solution/Synapse/Patterns/uploadNotebooks.ps1 +++ b/solution/Synapse/Patterns/uploadNotebooks.ps1 @@ -1,5 +1,5 @@ -Import-Module .\GatherOutputsFromTerraform.psm1 -force -$tout = GatherOutputsFromTerraform +Import-Module ./GatherOutputsFromTerraform_SynapseFolder.psm1 -Force +$tout = GatherOutputsFromTerraform_SynapseFolder Write-Information "_____________________________" Write-Information " Uploading Synapse Notebooks " From 779878b9712053a8592a4f698eda82be6eea6031 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 7 Aug 2022 11:02:23 +0800 Subject: [PATCH 099/151] Fixed Inconsistent Firewall Rule Naming --- solution/DeploymentV2/terraform_layer2/synapse.tf | 2 +- solution/DeploymentV2/terraform_layer3/database.ps1 | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/solution/DeploymentV2/terraform_layer2/synapse.tf b/solution/DeploymentV2/terraform_layer2/synapse.tf index 6e56203a..63879279 100644 --- a/solution/DeploymentV2/terraform_layer2/synapse.tf +++ b/solution/DeploymentV2/terraform_layer2/synapse.tf @@ -130,7 +130,7 @@ resource "azurerm_synapse_firewall_rule" "cicd" { resource "azurerm_synapse_firewall_rule" "cicd_user" { count = var.deploy_adls && var.deploy_synapse ? 
1 : 0 - name = "AllowCICDUser" + name = "CICDUser" synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id start_ip_address = var.ip_address2 end_ip_address = var.ip_address2 diff --git a/solution/DeploymentV2/terraform_layer3/database.ps1 b/solution/DeploymentV2/terraform_layer3/database.ps1 index 60107315..26574801 100644 --- a/solution/DeploymentV2/terraform_layer3/database.ps1 +++ b/solution/DeploymentV2/terraform_layer3/database.ps1 @@ -137,8 +137,8 @@ else { #Add Ip to SQL Firewall #$result = az synapse workspace update -n $synapse_workspace_name -g $resource_group_name --set publicNetworkAccess="Enabled" - $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "DeploymentAgent" --start-ip-address $myIp --end-ip-address $myIp - $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "DeploymentUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp + $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 if ($tout.is_vnet_isolated -eq $false) { From 94853ffd6c32aa251ec818fc961cb7905cae86c2 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 7 Aug 2022 14:47:45 +0800 Subject: [PATCH 100/151] Fixed GitDeploy issue --- ...atherOutputsFromTerraform_DataFactoryFolder.psm1 | 10 +++++++++- .../pwshmodules/Deploy_9_DataFactory.psm1 | 4 ++-- .../GatherOutputsFromTerraform_SynapseFolder.psm1 | 13 +++++++++---- 3 files changed, 20 insertions(+), 7 deletions(-) diff --git 
a/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 b/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 index a7b23efb..3c3faf79 100644 --- a/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 +++ b/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 @@ -6,7 +6,15 @@ function GatherOutputsFromTerraform_DataFactoryFolder() Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/GatherOutputsFromTerraform.psm1")) -Force Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/Deploy_0_Prep.psm1")) -Force - PrepareDeployment -gitDeploy $false -deploymentFolderPath $deploymentFolderPath -FeatureTemplate "" -PathToReturnTo $PathToReturnTo + + if($gitDeploy) + { + #Do Nothing as prepare will already have been run + } + else + { + PrepareDeployment -gitDeploy $false -deploymentFolderPath $deploymentFolderPath -FeatureTemplate "" -PathToReturnTo $PathToReturnTo + } $tout = GatherOutputsFromTerraform -TerraformFolderPath $TerraformFolderPath return $tout diff --git a/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 index 9b5be9e6..b6333441 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 @@ -22,10 +22,10 @@ function DeployDataFactoryAndSynapseArtefacts ( $myIp2 = $env:TF_VAR_ip_address2 if ($myIp -ne $null) { - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "DeploymentAgent" --start-ip-address $myIp --end-ip-address $myIp + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp } if ($myIp2 -ne $null) { - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n 
"DeploymentUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 } $SqlInstalled = Get-InstalledModule SqlServer diff --git a/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 b/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 index 717b7b58..4679650e 100644 --- a/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 +++ b/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 @@ -1,13 +1,18 @@ -function GatherOutputsFromTerraform_SynapseFolder() -{ +function GatherOutputsFromTerraform_SynapseFolder() { $PathToReturnTo = (Get-Location).Path $DeploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../../DeploymentV2/') $TerraformFolderPath = Convert-Path -Path ($DeploymentFolderPath + '/terraform_layer2/') Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/GatherOutputsFromTerraform.psm1")) -Force Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/Deploy_0_Prep.psm1")) -Force - PrepareDeployment -gitDeploy $false -deploymentFolderPath $deploymentFolderPath -FeatureTemplate "" -PathToReturnTo $PathToReturnTo - + + if ($gitDeploy) { + #Do Nothing as prepare will already have been run + } + else { + PrepareDeployment -gitDeploy $false -deploymentFolderPath $deploymentFolderPath -FeatureTemplate "" -PathToReturnTo $PathToReturnTo + } + $tout = GatherOutputsFromTerraform -TerraformFolderPath $TerraformFolderPath return $tout } \ No newline at end of file From 6ee4c56bfc69479878e08392562cd640efeebdff Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 7 Aug 2022 15:02:54 +0800 Subject: [PATCH 101/151] Removed Synapse Firewall Rules from Terraform --- .../DeploymentV2/terraform_layer2/synapse.tf | 31 ++++++++++--------- 1 file changed, 16 insertions(+), 15 
deletions(-) diff --git a/solution/DeploymentV2/terraform_layer2/synapse.tf b/solution/DeploymentV2/terraform_layer2/synapse.tf index 63879279..6ff3a888 100644 --- a/solution/DeploymentV2/terraform_layer2/synapse.tf +++ b/solution/DeploymentV2/terraform_layer2/synapse.tf @@ -119,22 +119,23 @@ resource "azurerm_synapse_spark_pool" "synapse_spark_pool" { # -------------------------------------------------------------------------------------------------------------------- # Synapse Workspace Firewall Rules (Allow Public Access) +# - These are now done outside of terraform as they are required prior to terraform in order to be able to read state # -------------------------------------------------------------------------------------------------------------------- -resource "azurerm_synapse_firewall_rule" "cicd" { - count = var.deploy_adls && var.deploy_synapse ? 1 : 0 - name = "CICDAgent" - synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id - start_ip_address = var.ip_address - end_ip_address = var.ip_address -} +# resource "azurerm_synapse_firewall_rule" "cicd" { +# count = var.deploy_adls && var.deploy_synapse ? 1 : 0 +# name = "CICDAgent" +# synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id +# start_ip_address = var.ip_address +# end_ip_address = var.ip_address +# } -resource "azurerm_synapse_firewall_rule" "cicd_user" { - count = var.deploy_adls && var.deploy_synapse ? 1 : 0 - name = "CICDUser" - synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id - start_ip_address = var.ip_address2 - end_ip_address = var.ip_address2 -} +# resource "azurerm_synapse_firewall_rule" "cicd_user" { +# count = var.deploy_adls && var.deploy_synapse ? 
1 : 0 +# name = "CICDUser" +# synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id +# start_ip_address = var.ip_address2 +# end_ip_address = var.ip_address2 +# } # -------------------------------------------------------------------------------------------------------------------- # Synapse Workspace Firewall Rules (Allow Public Access) @@ -148,7 +149,7 @@ resource "azurerm_synapse_firewall_rule" "public_access" { } resource "time_sleep" "azurerm_synapse_firewall_rule_wait_30_seconds_cicd" { - depends_on = [azurerm_synapse_firewall_rule.cicd] + depends_on = [] create_duration = "30s" } From c10d607127e2e5c4f2ee2e2ee6ddae9695a07ce8 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 7 Aug 2022 15:08:32 +0800 Subject: [PATCH 102/151] CICD Test --- .../DeploymentV2/terraform_layer2/synapse.tf | 29 +++++++++---------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/solution/DeploymentV2/terraform_layer2/synapse.tf b/solution/DeploymentV2/terraform_layer2/synapse.tf index 6ff3a888..17931aa6 100644 --- a/solution/DeploymentV2/terraform_layer2/synapse.tf +++ b/solution/DeploymentV2/terraform_layer2/synapse.tf @@ -119,23 +119,22 @@ resource "azurerm_synapse_spark_pool" "synapse_spark_pool" { # -------------------------------------------------------------------------------------------------------------------- # Synapse Workspace Firewall Rules (Allow Public Access) -# - These are now done outside of terraform as they are required prior to terraform in order to be able to read state # -------------------------------------------------------------------------------------------------------------------- -# resource "azurerm_synapse_firewall_rule" "cicd" { -# count = var.deploy_adls && var.deploy_synapse ? 
1 : 0 -# name = "CICDAgent" -# synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id -# start_ip_address = var.ip_address -# end_ip_address = var.ip_address -# } +resource "azurerm_synapse_firewall_rule" "cicd" { + count = var.deploy_adls && var.deploy_synapse ? 1 : 0 + name = "CICDAgent" + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + start_ip_address = var.ip_address + end_ip_address = var.ip_address +} -# resource "azurerm_synapse_firewall_rule" "cicd_user" { -# count = var.deploy_adls && var.deploy_synapse ? 1 : 0 -# name = "CICDUser" -# synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id -# start_ip_address = var.ip_address2 -# end_ip_address = var.ip_address2 -# } +resource "azurerm_synapse_firewall_rule" "cicd_user" { + count = var.deploy_adls && var.deploy_synapse ? 1 : 0 + name = "CICDUser" + synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id + start_ip_address = var.ip_address2 + end_ip_address = var.ip_address2 +} # -------------------------------------------------------------------------------------------------------------------- # Synapse Workspace Firewall Rules (Allow Public Access) From 45eea0774bcede7fbc4f7c45380b31f97c43cd07 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 7 Aug 2022 15:24:53 +0800 Subject: [PATCH 103/151] Fixed GitDeploy not being picked up in GatherOutputs --- .../Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 | 1 + .../Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 | 1 + 2 files changed, 2 insertions(+) diff --git a/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 b/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 index 3c3faf79..6d639094 100644 --- a/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 +++ b/solution/DataFactory/Patterns/GatherOutputsFromTerraform_DataFactoryFolder.psm1 @@ -7,6 +7,7 @@ function GatherOutputsFromTerraform_DataFactoryFolder() 
Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/GatherOutputsFromTerraform.psm1")) -Force Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/Deploy_0_Prep.psm1")) -Force + $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') if($gitDeploy) { #Do Nothing as prepare will already have been run diff --git a/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 b/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 index 4679650e..bea58f2f 100644 --- a/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 +++ b/solution/Synapse/Patterns/GatherOutputsFromTerraform_SynapseFolder.psm1 @@ -6,6 +6,7 @@ function GatherOutputsFromTerraform_SynapseFolder() { Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/GatherOutputsFromTerraform.psm1")) -Force Import-Module (Convert-Path -Path ($DeploymentFolderPath + "/pwshmodules/Deploy_0_Prep.psm1")) -Force + $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') if ($gitDeploy) { #Do Nothing as prepare will already have been run } From 73f0d395e90f5f25701199a3eaa4d0d9675c8308 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 8 Aug 2022 05:46:41 +0800 Subject: [PATCH 104/151] Updated UAT --- .../DeploymentV2/environments/vars/uat/common_vars_values.jsonc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc index b9132e69..a51f5569 100644 --- a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc @@ -40,6 +40,6 @@ "GIT_ADF_USER_NAME": "#####", "GIT_ADF_EMAIL_ADDRESS": "#####", "FeatureTemplateOverrides": { - "is_onprem_datafactory_ir_registered":false + "is_onprem_datafactory_ir_registered":true } } From 
35b29cd937a9603b94b666c6aa9090b14084b76b Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 8 Aug 2022 10:39:12 +0800 Subject: [PATCH 105/151] Updating Read.me --- .devcontainer/Dockerfile | 5 - .../featuretemplates/functional_tests.jsonc | 2 +- solution/DeploymentV2/environments/readme.md | 44 +++ .../environments/vars/common_vars_schema.json | 14 + .../vars/common_vars_schema.jsonnet | 302 ++++++++++++++++++ .../vars/common_vars_template.jsonnet | 2 + 6 files changed, 363 insertions(+), 6 deletions(-) create mode 100644 solution/DeploymentV2/environments/readme.md create mode 100644 solution/DeploymentV2/environments/vars/common_vars_schema.jsonnet diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 0e6814a2..f67ce574 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -37,11 +37,6 @@ RUN bash /tmp/library-scripts/common-debian.sh "${INSTALL_ZSH}" "${USERNAME}" "$ && mv terragrunt_linux_amd64 terragrunt \ && chmod u+x terragrunt \ && mv terragrunt /usr/local/bin/terragrunt - && curl -sSLo install.sh https://install.hclq.sh - && sh install.sh - && rm install.sh - - # [Optional] Uncomment this section to install additional OS packages. 
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ diff --git a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc index 6c3e9c71..a11a262a 100644 --- a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc +++ b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc @@ -17,6 +17,6 @@ {"Name":"publish_datafactory_pipelines","Value":true}, {"Name":"publish_web_app_addcurrentuserasadmin","Value":true}, {"Name":"deploy_selfhostedsql","Value":true}, - {"Name":"is_onprem_datafactory_ir_registered","Value":true}, + {"Name":"is_onprem_datafactory_ir_registered","Value":false}, {"Name":"publish_sif_database","Value":true} ] \ No newline at end of file diff --git a/solution/DeploymentV2/environments/readme.md b/solution/DeploymentV2/environments/readme.md new file mode 100644 index 00000000..19aaa7fb --- /dev/null +++ b/solution/DeploymentV2/environments/readme.md @@ -0,0 +1,44 @@ + +This folder contains the configuration settings and feature flags that determine what a specific ADS Go Fast deployment will be comprised of. + +# Feature Templates Directory + + +# Vars Directory +Contains a folder for each deployment environment. These folders control the number of "Analytics Landing Zones" that can be deployed by the repository. You can use these folders to allow for multiple environments that will support your SDLC (eg. Development, UAT, Production) and you can also use them to allow for multiple hub / spoke production deployments within a decentralised analytics architecture (Eg, EDW, Finance-Analytics, Operations-Analytics etc.). + +Within each of these folders is a file called "common_vars_values.jsonc". This file contains all of the configuration settings specific to a single Analytics Lockbox deployment. 
+ +| 01.Property | 02.Type | 03.Description | +| ---------------------------------- | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| ARM_DATALAKE_NAME | string | Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates.. | +| ARM_KEYVAULT_NAME | string | Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates.. | +| ARM_PAL_PARTNER_ID | string | ID of Implementation Partner for PAL purposes. Set to 0 if not in use | +| ARM_SYNAPSE_WORKSPACE_NAME | string | Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates. | +| FeatureTemplateOverrides | object | Each deployment references a feature template (eg. basic_deployment, full_deployment etc). This collection of objects allows you to override the default feature template values. | +| GIT_ADF_EMAIL_ADDRESS | string | Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | +| GIT_ADF_REPOSITORY_BRANCH_NAME | string | Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. 
Set to '#####' if not using Git integration for Synapse and ADF. | +| GIT_ADF_REPOSITORY_NAME | string | Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file.Set to '#####' if not using Git integration for Synapse and ADF. | +| GIT_ADF_USER_NAME | string | Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | +| GIT_EMAIL_ADDRESS | string | Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | +| GIT_REPOSITORY_NAME | string | Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | +| GIT_SYNAPSE_REPOSITORY_BRANCH_NAME | string | Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | +| GIT_USER_NAME | string | Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | +| WEB_APP_ADMIN_SECURITY_GROUP | string | Name of the security group whos memebers will be given admin access to framework web front end.Set to '#####' if not in use. | +| WEB_APP_ADMIN_USER | string | Object_Id of the user that you would like to have direct, explicit admin access to framework web front end.Set to '#####' if not in use. 
This setting over-rides the WEB_APP_ADMIN_SECURITY_GROUP. Use in scenarios where use of a security group is not possible. | +| azure_sql_aad_administrators | object | Object with a property for each user that is to be a Azure SQL Administrator. This should be a user or AAD Group. The property name should be the username of the user in AAD and should be unique. The property value is the object_id of the user. You MUST provide the sql_aad_admin property which will determine the AAD Admin setting for the Azure SQL Server. | +| deployment_principal_layers1and3 | string | Object Id of the AAD account that will manage layer's 1 & 3. Note leave this blank if you are going to also include this principal in the resource owner's collection. | +| domain | string | Azure Active Directory Domain to be used for authentication | +| environment_tag | string | Environment tag to be used in resource naming convention. Keep to 3 letters to prevent names becoming too long. | +| ip_address | string | | +| ip_address2 | string | | +| owner_tag | string | Owner tag to be applied to resources. | +| prefix | string | Prefix to be applied to all resource names. Keep to 3 letters to prevent names becoming too long. | +| resource_group_name | string | Name of the resource group into which the analytics landing zone will be deployed | +| resource_location | string | Azure region to be used for deployment. | +| resource_owners | array | | +| subscription_id | string | Id of the subscription into which the analytics landing zone will be deployed | +| synapse_administrators | object | Object with a property for each user that is to be a Synapse Administrator. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. NOTE - do not include the Synpase Workspace creator in this list as it will be added automatically. 
| +| synapse_contributors | object | Object with a property for each user that is to be a Synapse Contributor. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. | +| synapse_publishers | object | Object with a property for each user that is to be a Synapse Publisher. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. | +| tenant_id | string | | \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_schema.json b/solution/DeploymentV2/environments/vars/common_vars_schema.json index e51ba379..8297a2de 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_schema.json +++ b/solution/DeploymentV2/environments/vars/common_vars_schema.json @@ -258,6 +258,20 @@ "examples": [ "test@test.com" ] + }, + "FeatureTemplateOverrides": { + "type": "object", + "description": "Each deployment references a feature template (eg. basic_deployment, full_deployment etc). 
This collection of objects allows you to override the default feature template values.", + "required": [], + "properties": + { + + }, + "examples": [ + {"is_onprem_datafactory_ir_registered":true} + ] } + + } } \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_schema.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_schema.jsonnet new file mode 100644 index 00000000..3f9792c9 --- /dev/null +++ b/solution/DeploymentV2/environments/vars/common_vars_schema.jsonnet @@ -0,0 +1,302 @@ +local schema = { + "type": "object", + "required": [], + "properties": { + "owner_tag": { + "type": "string", + "default": "Contoso", + "description": "Owner tag to be applied to resources.", + "examples": [ + "Contoso" + ] + }, + "resource_location": { + "type": "string", + "default": "australiaeast", + "description": "Azure region to be used for deployment.", + "enum": [ + "australiaeast", + "brazilsouth", + "eastasia", + "eastus", + "eastus2", + "northeurope", + "southcentralus", + "southeastasia", + "westcentralus", + "westeurope", + "westus", + "westus2" + ], + "examples":[] + }, + "environment_tag": { + "type": "string", + "default": "dev", + "description": "Environment tag to be used in resource naming convention. Keep to 3 letters to prevent names becoming too long.", + "examples": [ + "dev" + ] + }, + "domain": { + "type": "string", + "default": "mydomain.com", + "description": "Azure Active Directory Domain to be used for authentication", + "examples": [ + "mydomain.com" + ] + }, + "subscription_id": { + "type": "string", + "default": "035ass64-f00d-48e2-b582-4fe1ss905ee3", + "description": "Id of the subscription into which the analytics landing zone will be deployed", + "examples": [ + "035a1ss4-f00d-48e2-b582-4fe1ss905ee3" + ] + }, + "prefix": { + "type": "string", + "default": "ads", + "description": "Prefix to be applied to all resource names. 
Keep to 3 letters to prevent names becoming too long.", + "examples": [ + "ads" + ] + }, + "resource_group_name": { + "type": "string", + "default": "gf1", + "description": "Name of the resource group into which the analytics landing zone will be deployed", + "examples": [ + "gf1" + ] + }, + "ip_address": { + "type": "string", + "examples": [], + "description": "" + }, + "ip_address2": { + "type": "string", + "examples": [], + "description": "" + }, + "tenant_id": { + "type": "string", + "examples": [], + "description": "" + }, + "deployment_principal_layers1and3": { + "description": "Object Id of the AAD account that will manage layer's 1 & 3. Note leave this blank if you are going to also include this principal in the resource owner's collection.", + "type": "string", + "default": "", + "examples": [] + }, + "resource_owners": { + "type": "array", + "items": { + "type": "string" + }, + "examples": [], + "description": "" + }, + "synapse_administrators": { + "type": "object", + "required": [], + "properties": {}, + "default": {}, + "description": "Object with a property for each user that is to be a Synapse Administrator. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. NOTE - do not include the Synpase Workspace creator in this list as it will be added automatically.", + "examples": [ + { + "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + }] + }, + "azure_sql_aad_administrators": { + "type": "object", + "required": [], + "default": {"sql_aad_admin": ""}, + "description": "Object with a property for each user that is to be a Azure SQL Administrator. This should be a user or AAD Group. The property name should be the username of the user in AAD and should be unique. The property value is the object_id of the user. 
You MUST provide the sql_aad_admin property which will determine the AAD Admin setting for the Azure SQL Server. ", + "properties": { + "sql_aad_admin": { + "type": "string" + }, + "deploy_agent": { + "type": "string" + } + }, + "examples": [ + { + "sql_aad_admin": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "deploy_agent": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + } + ] + }, + "synapse_publishers": { + "type": "object", + "required": [], + "properties": {}, + "default": {}, + "description": "Object with a property for each user that is to be a Synapse Publisher. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use.", + "examples": [ + { + "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + } + ] + + }, + "synapse_contributors": { + "type": "object", + "required": [], + "properties": {}, + "default": {}, + "description": "Object with a property for each user that is to be a Synapse Contributor. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use.", + "examples": [ + { + "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + }] + }, + "WEB_APP_ADMIN_USER": { + "type": "string", + "default": "#####", + "description": "Object_Id of the user that you would like to have direct, explicit admin access to framework web front end.Set to '#####' if not in use. This setting over-rides the WEB_APP_ADMIN_SECURITY_GROUP. 
Use in scenarios where use of a security group is not possible.", + "examples": [ + "5c732d19-4076-4a76-87f3-6fbfd72f007d" + ] + }, + "WEB_APP_ADMIN_SECURITY_GROUP": { + "type": "string", + "default": "#####", + "description": "Name of the security group whos memebers will be given admin access to framework web front end.Set to '#####' if not in use.", + "examples": [ + "mysecuritygroup" + ] + }, + "ARM_SYNAPSE_WORKSPACE_NAME": { + "type": "string", + "default": "adsstgsynwadslwra", + "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates.", + "examples": [ + "adsstgsynwadslwra" + ] + }, + "ARM_KEYVAULT_NAME": { + "type": "string", + "default": "ads-stg-kv-ads-lwra", + "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates..", + "examples": [ + "ads-stg-kv-ads-lwra" + ] + }, + "ARM_DATALAKE_NAME": { + "type": "string", + "default": "adsstgdlsadslwraadsl", + "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates..", + "examples": [ + "adsstgdlsadslwraadsl" + ] + }, + "ARM_PAL_PARTNER_ID": { + "type": "string", + "default": "0", + "description": "ID of Implementation Partner for PAL purposes. Set to 0 if not in use", + "examples": [ + "0" + ] + }, + "GIT_REPOSITORY_NAME": { + "type": "string", + "default": "#####", + "description": "Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. 
Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "mytestrepo" + ] + }, + "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": { + "type": "string", + "default": "#####", + "description": "Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "main" + ] + }, + "GIT_USER_NAME": { + "type": "string", + "default": "#####", + "description": "Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test user" + ] + }, + "GIT_EMAIL_ADDRESS": { + "type": "string", + "default": "#####", + "description": "Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test@test.com" + ] + }, + "GIT_ADF_REPOSITORY_NAME": { + "type": "string", + "default": "#####", + "description": "Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file.Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "mytestrepo" + ] + }, + "GIT_ADF_REPOSITORY_BRANCH_NAME": { + "type": "string", + "default": "#####", + "description": "Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. 
Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "main" + ] + }, + "GIT_ADF_USER_NAME": { + "type": "string", + "default": "#####", + "description": "Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test user" + ] + }, + "GIT_ADF_EMAIL_ADDRESS": { + "type": "string", + "default": "#####", + "description": "Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test@test.com" + ] + }, + "FeatureTemplateOverrides": { + "type": "object", + "description": "Each deployment references a feature template (eg. basic_deployment, full_deployment etc). This collection of objects allows you to override the default feature template values.", + "required": [], + "properties": + { + + }, + "examples": [ + {"is_onprem_datafactory_ir_registered":true} + ] + } + + + } + }; + +local properties = schema["properties"]; + +{ + "test": [ // Object comprehension. + { + ["01.Property"]: sd, + ["02.Type"]: properties[sd]["type"], + ["03.Description"]: properties[sd]["description"], + + #["Examples"]: properties[sd]["examples"], + } + for sd in std.objectFields(properties) + ] +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 49216c47..85887e13 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -701,6 +701,8 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + From 340f7491d9af8cd187e031d4259c2332649ade1a Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 8 Aug 2022 12:49:17 +0800 Subject: [PATCH 106/151] Updated PreprocessEnvironment to allow injection of feature template vars --- solution/DeploymentV2/environments/readme.md | 39 +- .../vars/PreprocessEnvironment.ps1 | 51 +- .../environments/vars/common_vars_schema.json | 635 ++++++++++-------- .../vars/common_vars_schema.jsonnet | 296 +------- .../vars/common_vars_template.jsonnet | 16 + 5 files changed, 431 insertions(+), 606 deletions(-) diff --git a/solution/DeploymentV2/environments/readme.md b/solution/DeploymentV2/environments/readme.md index 19aaa7fb..739cef77 100644 --- a/solution/DeploymentV2/environments/readme.md +++ b/solution/DeploymentV2/environments/readme.md @@ -3,42 +3,13 @@ This folder contains the configuration settings and feature flags that determine # Feature Templates Directory +- full_deployment +- basic_deployment +- full_deployment_no_purview +- functional_tests + # Vars Directory Contains a folder for each deployment environment. These folders control the number of "Analytics Landing Zones" that can be deployed by the repository. You can use these folders to allow for multiple environments that will support your SDLC (eg. Development, UAT, Production) and you can also use them to allow for multiple hub / spoke production deployments within a decentralised analytics architecture (Eg, EDW, Finance-Analytics, Operations-Analytics etc.). Within each of these folders is a file called "common_vars_values.jsonc". This file contains all of the configuration settings specific to a single Analytics Lockbox deployment. 
- -| 01.Property | 02.Type | 03.Description | -| ---------------------------------- | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| ARM_DATALAKE_NAME | string | Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates.. | -| ARM_KEYVAULT_NAME | string | Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates.. | -| ARM_PAL_PARTNER_ID | string | ID of Implementation Partner for PAL purposes. Set to 0 if not in use | -| ARM_SYNAPSE_WORKSPACE_NAME | string | Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates. | -| FeatureTemplateOverrides | object | Each deployment references a feature template (eg. basic_deployment, full_deployment etc). This collection of objects allows you to override the default feature template values. | -| GIT_ADF_EMAIL_ADDRESS | string | Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | -| GIT_ADF_REPOSITORY_BRANCH_NAME | string | Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. 
Set to '#####' if not using Git integration for Synapse and ADF. | -| GIT_ADF_REPOSITORY_NAME | string | Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file.Set to '#####' if not using Git integration for Synapse and ADF. | -| GIT_ADF_USER_NAME | string | Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | -| GIT_EMAIL_ADDRESS | string | Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | -| GIT_REPOSITORY_NAME | string | Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | -| GIT_SYNAPSE_REPOSITORY_BRANCH_NAME | string | Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | -| GIT_USER_NAME | string | Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF. | -| WEB_APP_ADMIN_SECURITY_GROUP | string | Name of the security group whos memebers will be given admin access to framework web front end.Set to '#####' if not in use. | -| WEB_APP_ADMIN_USER | string | Object_Id of the user that you would like to have direct, explicit admin access to framework web front end.Set to '#####' if not in use. 
This setting over-rides the WEB_APP_ADMIN_SECURITY_GROUP. Use in scenarios where use of a security group is not possible. | -| azure_sql_aad_administrators | object | Object with a property for each user that is to be a Azure SQL Administrator. This should be a user or AAD Group. The property name should be the username of the user in AAD and should be unique. The property value is the object_id of the user. You MUST provide the sql_aad_admin property which will determine the AAD Admin setting for the Azure SQL Server. | -| deployment_principal_layers1and3 | string | Object Id of the AAD account that will manage layer's 1 & 3. Note leave this blank if you are going to also include this principal in the resource owner's collection. | -| domain | string | Azure Active Directory Domain to be used for authentication | -| environment_tag | string | Environment tag to be used in resource naming convention. Keep to 3 letters to prevent names becoming too long. | -| ip_address | string | | -| ip_address2 | string | | -| owner_tag | string | Owner tag to be applied to resources. | -| prefix | string | Prefix to be applied to all resource names. Keep to 3 letters to prevent names becoming too long. | -| resource_group_name | string | Name of the resource group into which the analytics landing zone will be deployed | -| resource_location | string | Azure region to be used for deployment. | -| resource_owners | array | | -| subscription_id | string | Id of the subscription into which the analytics landing zone will be deployed | -| synapse_administrators | object | Object with a property for each user that is to be a Synapse Administrator. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. NOTE - do not include the Synpase Workspace creator in this list as it will be added automatically. 
| -| synapse_contributors | object | Object with a property for each user that is to be a Synapse Contributor. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. | -| synapse_publishers | object | Object with a property for each user that is to be a Synapse Publisher. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. | -| tenant_id | string | | \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 148a6846..8b8c3a3c 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -54,22 +54,11 @@ $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Na $obj = Get-Content ($newfolder + "/common_vars.json") | ConvertFrom-Json -#featureTemplateOverrides -$fto_vals = ((Get-Content -Path "./uat/common_vars_values.jsonc") | ConvertFrom-Json -Depth 10).FeatureTemplateOverrides -$fto_keys = $fto_vals | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"} - -foreach($t in ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"})) +$envarprops = ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"}) +foreach($t in $envarprops) { $Name = $t.Name - $Value = $obj.ForEnvVar[0].$Name - - #Feature Template Value Overrides - if(($fto_keys | Where-Object {$_.Name -eq $Name.Replace("TF_VAR_","")}).count -gt 0) - { - $fto_prop = ($fto_keys | Where-Object {$_.Name -eq $Name.Replace("TF_VAR_","")}).Name - Write-Warning "Overriding Feature Template value for $fto_prop" - $Value = $fto_vals.$fto_prop - } + $Value = $obj.ForEnvVar[0].$Name if($Value.GetType().Name -eq "Boolean") { @@ -86,6 +75,40 @@ 
foreach($t in ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "No } } +#Feature Template Value Overrides +$fto_vals = ((Get-Content -Path "./uat/common_vars_values.jsonc") | ConvertFrom-Json -Depth 10).FeatureTemplateOverrides +$fto_keys = $fto_vals | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"} + +foreach($fto in $fto_keys) +{ + $Name = $fto.Name + if (($envarprops | Where-Object {$_.Name -eq "TF_VAR_$Name"}).Count -gt 0) + { + $ev_prop = ($envarprops | Where-Object {$_.Name -eq "TF_VAR_$Name"}).Name + $Value = $fto_vals.$Name + Write-Warning "Overriding Feature Template value for $ev_prop with value of $Value" + } + else { + Write-Warning "Inserting Feature Template value for $ev_prop with value of $Value" + $Value = $fto_vals.$Name + } + + if($Value.GetType().Name -eq "Boolean") + { + $Value = $Value.ToString().ToLower() + } + if($Value.GetType().Name -eq "PSCustomObject") + { + $Value = ($Value | ConvertTo-Json -Depth 10) + } + + if([string]::IsNullOrEmpty($Value) -eq $false -and $Value -ne '#####') + { + [Environment]::SetEnvironmentVariable($Name, $Value) + } +} + + foreach($t in ($obj.ForSecretFile | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"})) { $Name = $t.Name diff --git a/solution/DeploymentV2/environments/vars/common_vars_schema.json b/solution/DeploymentV2/environments/vars/common_vars_schema.json index 8297a2de..f3a3ce37 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_schema.json +++ b/solution/DeploymentV2/environments/vars/common_vars_schema.json @@ -1,277 +1,378 @@ { - "type": "object", - "required": [], - "properties": { - "owner_tag": { - "type": "string", - "default": "Contoso", - "description": "Owner tag to be applied to resources.", - "examples": [ - "Contoso" - ] - }, - "resource_location": { - "type": "string", - "default": "australiaeast", - "description": "Azure region to be used for deployment.", - "enum": [ - "australiaeast", - "brazilsouth", - "eastasia", - "eastus", - 
"eastus2", - "northeurope", - "southcentralus", - "southeastasia", - "westcentralus", - "westeurope", - "westus", - "westus2" - ] - }, - "environment_tag": { - "type": "string", - "default": "dev", - "description": "Environment tag to be used in resource naming convention. Keep to 3 letters to prevent names becoming too long.", - "examples": [ - "dev" - ] - }, - "domain": { - "type": "string", - "default": "mydomain.com", - "description": "Azure Active Directory Domain to be used for authentication", - "examples": [ - "mydomain.com" - ] - }, - "subscription_id": { - "type": "string", - "default": "035ass64-f00d-48e2-b582-4fe1ss905ee3", - "description": "Id of the subscription into which the analytics landing zone will be deployed", - "examples": [ - "035a1ss4-f00d-48e2-b582-4fe1ss905ee3" - ] - }, - "prefix": { - "type": "string", - "default": "ads", - "description": "Prefix to be applied to all resource names. Keep to 3 letters to prevent names becoming too long.", - "examples": [ - "ads" - ] - }, - "resource_group_name": { - "type": "string", - "default": "gf1", - "description": "Name of the resource group into which the analytics landing zone will be deployed", - "examples": [ - "gf1" - ] - }, - "ip_address": { + "type": "object", + "required": [], + "properties": { + "owner_tag": { + "type": "string", + "default": "Contoso", + "description": "Owner tag to be applied to resources.", + "examples": [ + "Contoso" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "resource_location": { + "type": "string", + "default": "australiaeast", + "description": "Azure region to be used for deployment.", + "enum": [ + "australiaeast", + "brazilsouth", + "eastasia", + "eastus", + "eastus2", + "northeurope", + "southcentralus", + "southeastasia", + "westcentralus", + "westeurope", + "westus", + "westus2" + ], + "examples":[], + "options": { + "category": "Target Azure Environment" + } + }, + "environment_tag": { + "type": "string", + "default": "dev", + 
"description": "Environment tag to be used in resource naming convention. Keep to 3 letters to prevent names becoming too long.", + "examples": [ + "dev" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "domain": { + "type": "string", + "default": "mydomain.com", + "description": "Azure Active Directory Domain to be used for authentication", + "examples": [ + "mydomain.com" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "subscription_id": { + "type": "string", + "default": "035ass64-f00d-48e2-b582-4fe1ss905ee3", + "description": "Id of the subscription into which the analytics landing zone will be deployed", + "examples": [ + "035a1ss4-f00d-48e2-b582-4fe1ss905ee3" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "prefix": { + "type": "string", + "default": "ads", + "description": "Prefix to be applied to all resource names. Keep to 3 letters to prevent names becoming too long.", + "examples": [ + "ads" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "resource_group_name": { + "type": "string", + "default": "gf1", + "description": "Name of the resource group into which the analytics landing zone will be deployed", + "examples": [ + "gf1" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "ip_address": { + "type": "string", + "examples": [], + "description": "", + "options": { + "category": "Target Azure Environment" + } + }, + "ip_address2": { + "type": "string", + "examples": [], + "description": "", + "options": { + "category": "Target Azure Environment" + } + }, + "tenant_id": { + "type": "string", + "examples": ["ee3e45d7-7080-4b63-9d85-86fbfa917bb1"], + "description": "The id of the Azure tenant that you wish to deploy into.", + "options": { + "category": "Target Azure Environment" + } + }, + "deployment_principal_layers1and3": { + "description": "Object Id of the AAD account that will manage layer's 1 & 3. 
Note leave this blank if you are going to also include this principal in the resource owner's collection.", + "type": "string", + "default": "", + "examples": [] + }, + "resource_owners": { + "type": "array", + "items": { "type": "string" }, - "ip_address2": { - "type": "string" - }, - "tenant_id": { - "type": "string" - }, - "deployment_principal_layers1and3": { - "description": "Object Id of the AAD account that will manage layer's 1 & 3. Note leave this blank if you are going to also include this principal in the resource owner's collection.", - "type": "string", - "default": "" - }, - "resource_owners": { - "type": "array", - "items": { + "default":[], + "examples": ["ee3e45d7-7080-4b63-9d85-86fbfa917bb1"], + "description": "List of object id's relating to identities that will own all deployed resources.", + "options": { + "category": "Target Azure Environment" + } + }, + "synapse_administrators": { + "type": "object", + "required": [], + "properties": {}, + "default": {}, + "description": "Object with a property for each user that is to be a Synapse Administrator. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. NOTE - do not include the Synpase Workspace creator in this list as it will be added automatically.", + "examples": [ + { + "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + }], + "options": { + "category": "Target Azure Environment" + } + }, + "azure_sql_aad_administrators": { + "type": "object", + "required": [], + "default": {"sql_aad_admin": ""}, + "description": "Object with a property for each user that is to be a Azure SQL Administrator. This should be a user or AAD Group. The property name should be the username of the user in AAD and should be unique. The property value is the object_id of the user. 
You MUST provide the sql_aad_admin property which will determine the AAD Admin setting for the Azure SQL Server. ", + "properties": { + "sql_aad_admin": { + "type": "string" + }, + "deploy_agent": { "type": "string" } }, - "synapse_administrators": { - "type": "object", - "required": [], - "properties": {}, - "default": {}, - "description": "Object with a property for each user that is to be a Synapse Administrator. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. NOTE - do not include the Synpase Workspace creator in this list as it will be added automatically.", - "examples": [ - { - "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", - "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" - }] - }, - "azure_sql_aad_administrators": { - "type": "object", - "required": [], - "default": {"sql_aad_admin": ""}, - "description": "Object with a property for each user that is to be a Azure SQL Administrator. This should be a user or AAD Group. The property name should be the username of the user in AAD and should be unique. The property value is the object_id of the user. You MUST provide the sql_aad_admin property which will determine the AAD Admin setting for the Azure SQL Server. ", - "properties": { - "sql_aad_admin": { - "type": "string" - }, - "deploy_agent": { - "type": "string" + "examples": [ + { + "sql_aad_admin": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "deploy_agent": "5c732d19-4076-4a76-87f3-6fbfd72f007d" } - }, - "examples": [ - { - "sql_aad_admin": "5c736d19-4076-4a76-87f3-6fbfd77f007d", - "deploy_agent": "5c732d19-4076-4a76-87f3-6fbfd72f007d" - } - ] - }, - "synapse_publishers": { - "type": "object", - "required": [], - "properties": {}, - "default": {}, - "description": "Object with a property for each user that is to be a Synapse Publisher. The property name is descriptive only and should be unique. The property value is the object_id of the user. 
Leave as empty object when not is use.", - "examples": [ - { - "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", - "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" - } - ] - - }, - "synapse_contributors": { - "type": "object", - "required": [], - "properties": {}, - "default": {}, - "description": "Object with a property for each user that is to be a Synapse Contributor. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use.", - "examples": [ - { - "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", - "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" - }] - }, - "WEB_APP_ADMIN_USER": { - "type": "string", - "default": "#####", - "description": "Object_Id of the user that you would like to have direct, explicit admin access to framework web front end.Set to '#####' if not in use. This setting over-rides the WEB_APP_ADMIN_SECURITY_GROUP. Use in scenarios where use of a security group is not possible.", - "examples": [ - "5c732d19-4076-4a76-87f3-6fbfd72f007d" - ] - }, - "WEB_APP_ADMIN_SECURITY_GROUP": { - "type": "string", - "default": "#####", - "description": "Name of the security group whos memebers will be given admin access to framework web front end.Set to '#####' if not in use.", - "examples": [ - "mysecuritygroup" - ] - }, - "ARM_SYNAPSE_WORKSPACE_NAME": { - "type": "string", - "default": "adsstgsynwadslwra", - "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates.", - "examples": [ - "adsstgsynwadslwra" - ] - }, - "ARM_KEYVAULT_NAME": { - "type": "string", - "default": "ads-stg-kv-ads-lwra", - "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. 
They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates..", - "examples": [ - "ads-stg-kv-ads-lwra" - ] - }, - "ARM_DATALAKE_NAME": { - "type": "string", - "default": "adsstgdlsadslwraadsl", - "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates..", - "examples": [ - "adsstgdlsadslwraadsl" - ] - }, - "ARM_PAL_PARTNER_ID": { - "type": "string", - "default": "0", - "description": "ID of Implementation Partner for PAL purposes. Set to 0 if not in use", - "examples": [ - "0" - ] - }, - "GIT_REPOSITORY_NAME": { - "type": "string", - "default": "#####", - "description": "Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "mytestrepo" - ] - }, - "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": { - "type": "string", - "default": "#####", - "description": "Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "main" - ] - }, - "GIT_USER_NAME": { - "type": "string", - "default": "#####", - "description": "Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "test user" - ] - }, - "GIT_EMAIL_ADDRESS": { - "type": "string", - "default": "#####", - "description": "Email address of git user used for publishing artefacts. 
Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "test@test.com" - ] - }, - "GIT_ADF_REPOSITORY_NAME": { - "type": "string", - "default": "#####", - "description": "Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file.Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "mytestrepo" - ] - }, - "GIT_ADF_REPOSITORY_BRANCH_NAME": { - "type": "string", - "default": "#####", - "description": "Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "main" - ] - }, - "GIT_ADF_USER_NAME": { - "type": "string", - "default": "#####", - "description": "Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "test user" - ] - }, - "GIT_ADF_EMAIL_ADDRESS": { - "type": "string", - "default": "#####", - "description": "Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "test@test.com" - ] - }, - "FeatureTemplateOverrides": { - "type": "object", - "description": "Each deployment references a feature template (eg. basic_deployment, full_deployment etc). 
This collection of objects allows you to override the default feature template values.", - "required": [], - "properties": - { - - }, - "examples": [ - {"is_onprem_datafactory_ir_registered":true} - ] + ], + "options": { + "category": "Target Azure Environment" } + }, + "synapse_publishers": { + "type": "object", + "required": [], + "properties": {}, + "default": {}, + "description": "Object with a property for each user that is to be a Synapse Publisher. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use.", + "examples": [ + { + "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + } + ], + "options": { + "category": "Target Azure Environment" + } + + }, + "synapse_contributors": { + "type": "object", + "required": [], + "properties": {}, + "default": {}, + "description": "Object with a property for each user that is to be a Synapse Contributor. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use.", + "examples": [ + { + "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", + "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" + }], + "options": { + "category": "Target Azure Environment" + } + }, + "WEB_APP_ADMIN_USER": { + "type": "string", + "default": "#####", + "description": "Object_Id of the user that you would like to have direct, explicit admin access to framework web front end.Set to '#####' if not in use. This setting over-rides the WEB_APP_ADMIN_SECURITY_GROUP. 
Use in scenarios where use of a security group is not possible.", + "examples": [ + "5c732d19-4076-4a76-87f3-6fbfd72f007d" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "WEB_APP_ADMIN_SECURITY_GROUP": { + "type": "string", + "default": "#####", + "description": "Name of the security group whos memebers will be given admin access to framework web front end.Set to '#####' if not in use.", + "examples": [ + "mysecuritygroup" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "ARM_SYNAPSE_WORKSPACE_NAME": { + "type": "string", + "default": "adsstgsynwadslwra", + "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates.", + "examples": [ + "adsstgsynwadslwra" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "ARM_KEYVAULT_NAME": { + "type": "string", + "default": "ads-stg-kv-ads-lwra", + "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates..", + "examples": [ + "ads-stg-kv-ads-lwra" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "ARM_DATALAKE_NAME": { + "type": "string", + "default": "adsstgdlsadslwraadsl", + "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates..", + "examples": [ + "adsstgdlsadslwraadsl" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "ARM_PAL_PARTNER_ID": { + "type": "string", + "default": "0", + "description": "ID of Implementation Partner for PAL purposes. 
Set to 0 if not in use", + "examples": [ + "0" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "GIT_REPOSITORY_NAME": { + "type": "string", + "default": "#####", + "description": "Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "mytestrepo" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": { + "type": "string", + "default": "#####", + "description": "Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "main" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "GIT_USER_NAME": { + "type": "string", + "default": "#####", + "description": "Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test user" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "GIT_EMAIL_ADDRESS": { + "type": "string", + "default": "#####", + "description": "Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test@test.com" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "GIT_ADF_REPOSITORY_NAME": { + "type": "string", + "default": "#####", + "description": "Git Repository name used for for publishing ADF and Synapse artefacts. 
Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file.Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "mytestrepo" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "GIT_ADF_REPOSITORY_BRANCH_NAME": { + "type": "string", + "default": "#####", + "description": "Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "main" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "GIT_ADF_USER_NAME": { + "type": "string", + "default": "#####", + "description": "Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test user" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "GIT_ADF_EMAIL_ADDRESS": { + "type": "string", + "default": "#####", + "description": "Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", + "examples": [ + "test@test.com" + ], + "options": { + "category": "Target Azure Environment" + } + }, + "FeatureTemplateOverrides": { + "type": "object", + "description": "Each deployment references a feature template (eg. basic_deployment, full_deployment etc). 
This collection of objects allows you to override the default feature template values.", + "required": [], + "properties": + { - + }, + "examples": [ + {"is_onprem_datafactory_ir_registered":true} + ], + "options": { + "category": "Target Azure Environment" + } } - } \ No newline at end of file + + + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/common_vars_schema.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_schema.jsonnet index 3f9792c9..762da1d3 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_schema.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_schema.jsonnet @@ -1,299 +1,13 @@ -local schema = { - "type": "object", - "required": [], - "properties": { - "owner_tag": { - "type": "string", - "default": "Contoso", - "description": "Owner tag to be applied to resources.", - "examples": [ - "Contoso" - ] - }, - "resource_location": { - "type": "string", - "default": "australiaeast", - "description": "Azure region to be used for deployment.", - "enum": [ - "australiaeast", - "brazilsouth", - "eastasia", - "eastus", - "eastus2", - "northeurope", - "southcentralus", - "southeastasia", - "westcentralus", - "westeurope", - "westus", - "westus2" - ], - "examples":[] - }, - "environment_tag": { - "type": "string", - "default": "dev", - "description": "Environment tag to be used in resource naming convention. 
Keep to 3 letters to prevent names becoming too long.", - "examples": [ - "dev" - ] - }, - "domain": { - "type": "string", - "default": "mydomain.com", - "description": "Azure Active Directory Domain to be used for authentication", - "examples": [ - "mydomain.com" - ] - }, - "subscription_id": { - "type": "string", - "default": "035ass64-f00d-48e2-b582-4fe1ss905ee3", - "description": "Id of the subscription into which the analytics landing zone will be deployed", - "examples": [ - "035a1ss4-f00d-48e2-b582-4fe1ss905ee3" - ] - }, - "prefix": { - "type": "string", - "default": "ads", - "description": "Prefix to be applied to all resource names. Keep to 3 letters to prevent names becoming too long.", - "examples": [ - "ads" - ] - }, - "resource_group_name": { - "type": "string", - "default": "gf1", - "description": "Name of the resource group into which the analytics landing zone will be deployed", - "examples": [ - "gf1" - ] - }, - "ip_address": { - "type": "string", - "examples": [], - "description": "" - }, - "ip_address2": { - "type": "string", - "examples": [], - "description": "" - }, - "tenant_id": { - "type": "string", - "examples": [], - "description": "" - }, - "deployment_principal_layers1and3": { - "description": "Object Id of the AAD account that will manage layer's 1 & 3. Note leave this blank if you are going to also include this principal in the resource owner's collection.", - "type": "string", - "default": "", - "examples": [] - }, - "resource_owners": { - "type": "array", - "items": { - "type": "string" - }, - "examples": [], - "description": "" - }, - "synapse_administrators": { - "type": "object", - "required": [], - "properties": {}, - "default": {}, - "description": "Object with a property for each user that is to be a Synapse Administrator. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use. 
NOTE - do not include the Synpase Workspace creator in this list as it will be added automatically.", - "examples": [ - { - "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", - "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" - }] - }, - "azure_sql_aad_administrators": { - "type": "object", - "required": [], - "default": {"sql_aad_admin": ""}, - "description": "Object with a property for each user that is to be a Azure SQL Administrator. This should be a user or AAD Group. The property name should be the username of the user in AAD and should be unique. The property value is the object_id of the user. You MUST provide the sql_aad_admin property which will determine the AAD Admin setting for the Azure SQL Server. ", - "properties": { - "sql_aad_admin": { - "type": "string" - }, - "deploy_agent": { - "type": "string" - } - }, - "examples": [ - { - "sql_aad_admin": "5c736d19-4076-4a76-87f3-6fbfd77f007d", - "deploy_agent": "5c732d19-4076-4a76-87f3-6fbfd72f007d" - } - ] - }, - "synapse_publishers": { - "type": "object", - "required": [], - "properties": {}, - "default": {}, - "description": "Object with a property for each user that is to be a Synapse Publisher. The property name is descriptive only and should be unique. The property value is the object_id of the user. Leave as empty object when not is use.", - "examples": [ - { - "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", - "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" - } - ] - - }, - "synapse_contributors": { - "type": "object", - "required": [], - "properties": {}, - "default": {}, - "description": "Object with a property for each user that is to be a Synapse Contributor. The property name is descriptive only and should be unique. The property value is the object_id of the user. 
Leave as empty object when not is use.", - "examples": [ - { - "user_1": "5c736d19-4076-4a76-87f3-6fbfd77f007d", - "user_2": "5c732d19-4076-4a76-87f3-6fbfd72f007d" - }] - }, - "WEB_APP_ADMIN_USER": { - "type": "string", - "default": "#####", - "description": "Object_Id of the user that you would like to have direct, explicit admin access to framework web front end.Set to '#####' if not in use. This setting over-rides the WEB_APP_ADMIN_SECURITY_GROUP. Use in scenarios where use of a security group is not possible.", - "examples": [ - "5c732d19-4076-4a76-87f3-6fbfd72f007d" - ] - }, - "WEB_APP_ADMIN_SECURITY_GROUP": { - "type": "string", - "default": "#####", - "description": "Name of the security group whos memebers will be given admin access to framework web front end.Set to '#####' if not in use.", - "examples": [ - "mysecuritygroup" - ] - }, - "ARM_SYNAPSE_WORKSPACE_NAME": { - "type": "string", - "default": "adsstgsynwadslwra", - "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates.", - "examples": [ - "adsstgsynwadslwra" - ] - }, - "ARM_KEYVAULT_NAME": { - "type": "string", - "default": "ads-stg-kv-ads-lwra", - "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates..", - "examples": [ - "ads-stg-kv-ads-lwra" - ] - }, - "ARM_DATALAKE_NAME": { - "type": "string", - "default": "adsstgdlsadslwraadsl", - "description": "Post Layer 1 Resource name. Deployment of Terraform Layer One will give you these values. 
They are required to allow subsequent agent deployments to open firewall rules to allow deployment updates..", - "examples": [ - "adsstgdlsadslwraadsl" - ] - }, - "ARM_PAL_PARTNER_ID": { - "type": "string", - "default": "0", - "description": "ID of Implementation Partner for PAL purposes. Set to 0 if not in use", - "examples": [ - "0" - ] - }, - "GIT_REPOSITORY_NAME": { - "type": "string", - "default": "#####", - "description": "Git Repository name used for for publishing ADF and Synapse artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "mytestrepo" - ] - }, - "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": { - "type": "string", - "default": "#####", - "description": "Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "main" - ] - }, - "GIT_USER_NAME": { - "type": "string", - "default": "#####", - "description": "Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "test user" - ] - }, - "GIT_EMAIL_ADDRESS": { - "type": "string", - "default": "#####", - "description": "Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "test@test.com" - ] - }, - "GIT_ADF_REPOSITORY_NAME": { - "type": "string", - "default": "#####", - "description": "Git Repository name used for for publishing ADF and Synapse artefacts. 
Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file.Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "mytestrepo" - ] - }, - "GIT_ADF_REPOSITORY_BRANCH_NAME": { - "type": "string", - "default": "#####", - "description": "Git Branch Name associated with the branch that will be used to publish Synapse artefacts. Only used if synapse_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "main" - ] - }, - "GIT_ADF_USER_NAME": { - "type": "string", - "default": "#####", - "description": "Usernames of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "test user" - ] - }, - "GIT_ADF_EMAIL_ADDRESS": { - "type": "string", - "default": "#####", - "description": "Email address of git user used for publishing artefacts. Only used if synapse_git_toggle_integration or adf_git_toggle_integration is true in the hcl file. Set to '#####' if not using Git integration for Synapse and ADF.", - "examples": [ - "test@test.com" - ] - }, - "FeatureTemplateOverrides": { - "type": "object", - "description": "Each deployment references a feature template (eg. basic_deployment, full_deployment etc). This collection of objects allows you to override the default feature template values.", - "required": [], - "properties": - { - - }, - "examples": [ - {"is_onprem_datafactory_ir_registered":true} - ] - } - - - } - }; +local schema = import './common_vars_schema.json'; local properties = schema["properties"]; { - "test": [ // Object comprehension. + "SchemaTableForMarkDown": [ // Object comprehension. 
{ - ["01.Property"]: sd, - ["02.Type"]: properties[sd]["type"], - ["03.Description"]: properties[sd]["description"], + ["1.PROPERTY"]: sd, + ["2.TYPE"]: properties[sd]["type"], + ["3.DESCRIPTION"]: properties[sd]["description"], #["Examples"]: properties[sd]["examples"], } diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 85887e13..90a70be5 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -689,6 +689,22 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + + + + + + + + + From 3397989198ba86a0ef2012888b0b1486c568566b Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 8 Aug 2022 13:34:00 +0800 Subject: [PATCH 107/151] Fixed TF_VAR override issue --- .../environments/vars/PreprocessEnvironment.ps1 | 15 +++++++++++---- .../vars/common_vars_template.jsonnet | 16 ++++++++++++++++ 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 8b8c3a3c..544f6cd2 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -95,17 +95,24 @@ foreach($fto in $fto_keys) if($Value.GetType().Name -eq "Boolean") { + Write-Warning $Value.GetType().Name $Value = $Value.ToString().ToLower() } if($Value.GetType().Name -eq "PSCustomObject") - { + { + Write-Warning $Value.GetType().Name $Value = ($Value | ConvertTo-Json -Depth 10) } if([string]::IsNullOrEmpty($Value) -eq $false -and $Value -ne '#####') - { - [Environment]::SetEnvironmentVariable($Name, $Value) - } + { + #Write-Warning "Injecting Envar 'TF_VAR_$Name': $Value" + [Environment]::SetEnvironmentVariable("TF_VAR_$Name", $Value) + } + else + { + 
#Write-Warning "Value Supressed" + } } diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 90a70be5..9aa5864f 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -705,6 +705,22 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + + + + + + + + + From 5d29503d6f20e6da7b27cb0c7f9fad2d074a0c88 Mon Sep 17 00:00:00 2001 From: h-sha <97069267+h-sha@users.noreply.github.com> Date: Tue, 9 Aug 2022 08:02:23 +1000 Subject: [PATCH 108/151] terraform purview layer 1 fix / docker file fix --- .devcontainer/Dockerfile | 5 +- .../vars/PreprocessEnvironment.ps1 | 6 +-- .../vars/common_vars_template.jsonnet | 20 ++++++++ .../vars/staging/common_vars_values.jsonc | 49 +++++++++++-------- .../DeploymentV2/terraform_layer2/layer1.tf | 4 +- .../DeploymentV2/terraform_layer2/outputs.tf | 4 +- 6 files changed, 57 insertions(+), 31 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 0e6814a2..72086ab5 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -36,10 +36,7 @@ RUN bash /tmp/library-scripts/common-debian.sh "${INSTALL_ZSH}" "${USERNAME}" "$ && wget https://github.com/gruntwork-io/terragrunt/releases/download/v0.35.14/terragrunt_linux_amd64 \ && mv terragrunt_linux_amd64 terragrunt \ && chmod u+x terragrunt \ - && mv terragrunt /usr/local/bin/terragrunt - && curl -sSLo install.sh https://install.hclq.sh - && sh install.sh - && rm install.sh + && mv terragrunt /usr/local/bin/terragrunt diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 148a6846..7c6337d1 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ 
b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -69,8 +69,8 @@ foreach($t in ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "No $fto_prop = ($fto_keys | Where-Object {$_.Name -eq $Name.Replace("TF_VAR_","")}).Name Write-Warning "Overriding Feature Template value for $fto_prop" $Value = $fto_vals.$fto_prop + #Write-Warning "Overriding Feature Template value to $Value" } - if($Value.GetType().Name -eq "Boolean") { $Value = $Value.ToString().ToLower() @@ -79,10 +79,10 @@ foreach($t in ($obj.ForEnvVar | Get-Member | Where-Object {$_.MemberType -eq "No { $Value = ($Value | ConvertTo-Json -Depth 10) } - + #Write-Warning "Checking Value change $Value" if([string]::IsNullOrEmpty($Value) -eq $false -and $Value -ne '#####') { - [Environment]::SetEnvironmentVariable($Name, $Value) + [Environment]::SetEnvironmentVariable($Name, $Value) } } diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 49216c47..4501d5da 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -684,6 +684,26 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 73200918..9535fbb1 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -1,45 +1,54 @@ { "$schema": "./../common_vars_schema.json", - "owner_tag": "Contoso", + "owner_tag": "Arkahna", "resource_location": "australiaeast", "environment_tag": "stg", - "domain": "microsoft.com", - "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", - "prefix": "ads", - "resource_group_name": "gfuat", - "ip_address": "144.138.148.220", - "ip_address2": "163.47.54.2", - "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", - "deployment_principal_layers1and3": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "domain": "arkahna.io", + "subscription_id": "687fe1ae-a520-4f86-b921-a80664c40f9b", + "prefix": "ark", + "resource_group_name": "lockboxfeaturetest", + "ip_address": "159.196.32.59", + "ip_address2": "", + "tenant_id": "0fee3d31-b963-4a1c-8f4a-ca367205aa65", + "deployment_principal_layers1and3": "", "resource_owners": [ - "4c732d19-4076-4a76-87f3-6fbfd77f007d", - "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + "ed0568a1-1ee0-46ee-b7e8-0540c4660de2", + "72213528-b7fa-4d51-adc5-78bd2d684509" ], "synapse_administrators": { - "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + "hugo.sharpe@arkahna.io": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2", + "dlzdevtesting": "72213528-b7fa-4d51-adc5-78bd2d684509" }, "azure_sql_aad_administrators": { - "sql_aad_admin": "6f467924-4d92-40e3-b348-b3154d5cd437", - "AdsGoFastDeployerjkcgkaibkungm": "4c732d19-4076-4a76-87f3-6fbfd77f007d", - "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" + "sql_aad_admin": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2", + "dlzdevtesting": "72213528-b7fa-4d51-adc5-78bd2d684509" }, 
"synapse_publishers": {}, "synapse_contributors": {}, - "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "WEB_APP_ADMIN_USER": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2", "WEB_APP_ADMIN_SECURITY_GROUP": "#####", "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadspmu3", "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-pmu3", "ARM_DATALAKE_NAME": "adsstgdlsadspmu3adsl", "ARM_PAL_PARTNER_ID": "0", - "GIT_REPOSITORY_NAME": "#####", - "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "#####", - "GIT_USER_NAME": "#####", + "GIT_REPOSITORY_NAME": "testLockbox", + "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "featuretest", + "GIT_USER_NAME": "h-sha", "GIT_EMAIL_ADDRESS": "#####", "GIT_ADF_REPOSITORY_NAME": "#####", "GIT_ADF_REPOSITORY_BRANCH_NAME": "#####", "GIT_ADF_USER_NAME": "#####", "GIT_ADF_EMAIL_ADDRESS": "#####", "FeatureTemplateOverrides": { - "is_onprem_datafactory_ir_registered":false + "is_onprem_datafactory_ir_registered":false, + "synapse_git_toggle":true, + "adf_git_toggle":false, + "deploy_sentinel": false, + "publish_sif_database": false, + "synapse_git_toggle_integration": true, + "synapse_git_repository_owner": "h-sha", + "synapse_git_repository_name":"testLockbox", + "synapse_git_repository_root_folder": "/Synapse", + "synapse_git_use_pat": false } } diff --git a/solution/DeploymentV2/terraform_layer2/layer1.tf b/solution/DeploymentV2/terraform_layer2/layer1.tf index bb563b93..13be1709 100644 --- a/solution/DeploymentV2/terraform_layer2/layer1.tf +++ b/solution/DeploymentV2/terraform_layer2/layer1.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer1.tfstate" - resource_group_name = "gfuat" - storage_account_name = "gfuatstate" + resource_group_name = "lockboxfeaturetest" + storage_account_name = "lockboxfeatureteststate" } } diff --git a/solution/DeploymentV2/terraform_layer2/outputs.tf b/solution/DeploymentV2/terraform_layer2/outputs.tf index 20c1113c..6aedd257 100644 --- a/solution/DeploymentV2/terraform_layer2/outputs.tf +++ 
b/solution/DeploymentV2/terraform_layer2/outputs.tf @@ -70,7 +70,7 @@ output "purview_sp_name" { value = local.purview_ir_app_reg_name } output "azurerm_purview_account_purview_id" { - value = azurerm_purview_account.purview[0].id + value = var.deploy_purview ? azurerm_purview_account.purview[0].id : "" } output "is_vnet_isolated" { value = var.is_vnet_isolated @@ -312,7 +312,7 @@ output "private_dns_zone_purview_studio_id" { } output "azurerm_purview_account_purview_name" { - value = azurerm_purview_account.purview[0].name + value = var.deploy_purview ? azurerm_purview_account.purview[0].name : "" } output "plink_subnet_id" { From 740fb637e88bcb791cad4e0292427d47b3533cd7 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 9 Aug 2022 06:17:24 +0800 Subject: [PATCH 109/151] Fixed UAT Hardcoding --- .../DeploymentV2/environments/vars/PreprocessEnvironment.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 544f6cd2..f694d27b 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -76,7 +76,7 @@ foreach($t in $envarprops) } #Feature Template Value Overrides -$fto_vals = ((Get-Content -Path "./uat/common_vars_values.jsonc") | ConvertFrom-Json -Depth 10).FeatureTemplateOverrides +$fto_vals = ((Get-Content -Path "./$Environment/common_vars_values.jsonc") | ConvertFrom-Json -Depth 10).FeatureTemplateOverrides $fto_keys = $fto_vals | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"} foreach($fto in $fto_keys) From 194f4ef8241d4538255c4fae3f1eecec33f4f3ac Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 9 Aug 2022 06:23:13 +0800 Subject: [PATCH 110/151] Removed additional write-host --- .../DeploymentV2/environments/vars/PreprocessEnvironment.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index f694d27b..f744940d 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -100,7 +100,7 @@ foreach($fto in $fto_keys) } if($Value.GetType().Name -eq "PSCustomObject") { - Write-Warning $Value.GetType().Name + #Write-Warning $Value.GetType().Name $Value = ($Value | ConvertTo-Json -Depth 10) } From 9aabb141a90638aa307e8149c6546033efbc8fd1 Mon Sep 17 00:00:00 2001 From: h-sha <97069267+h-sha@users.noreply.github.com> Date: Tue, 9 Aug 2022 09:16:58 +1000 Subject: [PATCH 111/151] added conditional to IP set for resources --- .../vars/PreprocessEnvironment.ps1 | 6 +-- .../vars/common_vars_template.jsonnet | 16 ++++++++ .../vars/staging/common_vars_values.jsonc | 14 +++---- .../pwshmodules/Deploy_0_Prep.psm1 | 40 +++++++++++-------- 4 files changed, 47 insertions(+), 29 deletions(-) diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 69f6a5c1..1b3d213d 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -76,7 +76,7 @@ foreach($t in $envarprops) } #Feature Template Value Overrides -$fto_vals = ((Get-Content -Path "./uat/common_vars_values.jsonc") | ConvertFrom-Json -Depth 10).FeatureTemplateOverrides +$fto_vals = ((Get-Content -Path "./$Environment/common_vars_values.jsonc") | ConvertFrom-Json -Depth 10).FeatureTemplateOverrides $fto_keys = $fto_vals | Get-Member | Where-Object {$_.MemberType -eq "NoteProperty"} foreach($fto in $fto_keys) @@ -95,12 +95,12 @@ foreach($fto in $fto_keys) if($Value.GetType().Name -eq "Boolean") { - Write-Warning $Value.GetType().Name + #Write-Warning $Value.GetType().Name $Value = 
$Value.ToString().ToLower() } if($Value.GetType().Name -eq "PSCustomObject") { - Write-Warning $Value.GetType().Name + #Write-Warning $Value.GetType().Name $Value = ($Value | ConvertTo-Json -Depth 10) } diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 9aa5864f..bdd53347 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -721,6 +721,22 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 9535fbb1..f2643484 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -12,24 +12,20 @@ "tenant_id": "0fee3d31-b963-4a1c-8f4a-ca367205aa65", "deployment_principal_layers1and3": "", "resource_owners": [ - "ed0568a1-1ee0-46ee-b7e8-0540c4660de2", - "72213528-b7fa-4d51-adc5-78bd2d684509" + "ed0568a1-1ee0-46ee-b7e8-0540c4660de2" ], "synapse_administrators": { - "hugo.sharpe@arkahna.io": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2", - "dlzdevtesting": "72213528-b7fa-4d51-adc5-78bd2d684509" }, "azure_sql_aad_administrators": { - "sql_aad_admin": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2", - "dlzdevtesting": "72213528-b7fa-4d51-adc5-78bd2d684509" + "sql_aad_admin": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2" }, "synapse_publishers": {}, "synapse_contributors": {}, "WEB_APP_ADMIN_USER": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2", "WEB_APP_ADMIN_SECURITY_GROUP": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadspmu3", - "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-pmu3", - "ARM_DATALAKE_NAME": "adsstgdlsadspmu3adsl", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsp9cl", + 
"ARM_KEYVAULT_NAME": "ads-stg-kv-ads-p9cl", + "ARM_DATALAKE_NAME": "adsstgdlsadsp9cladsl", "ARM_PAL_PARTNER_ID": "0", "GIT_REPOSITORY_NAME": "testLockbox", "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "featuretest", diff --git a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 index 01802078..ee5c9a91 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 @@ -87,27 +87,33 @@ function PrepareDeployment ( Set-Location $deploymentFolderPath [System.Environment]::SetEnvironmentVariable('TFenvironmentName', $environmentName) - - try { - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address - $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 - $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address - } - catch { - Write-Warning 'Opening Firewalls for IP Address One Failed' + if ($env:TF_VAR_ip_address -ne "") + { + try { + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address + $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 + $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group 
$env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address + } + catch { + Write-Warning 'Opening Firewalls for IP Address One Failed' + } } - try { - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 - $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 - $hiddenoutput = az synapse workspace firewall-rule create --name CICDUser --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address2 - } - catch { - Write-Warning 'Opening Firewalls for IP Address Two Failed' + if ($env:TF_VAR_ip_address2 -ne "") + { + try { + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 + $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 + $hiddenoutput = az synapse workspace firewall-rule create --name CICDUser --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME + $hiddenoutput = az storage account network-rule add --resource-group 
$env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address2 + } + catch { + Write-Warning 'Opening Firewalls for IP Address Two Failed' + } } + if ([string]::IsNullOrEmpty($PathToReturnTo) -ne $true) { Write-Debug "Returning to $PathToReturnTo" Set-Location $PathToReturnTo From 68d8feb64183b7ac6f26a4dee728065ab194d8bb Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 9 Aug 2022 08:05:09 +0800 Subject: [PATCH 112/151] Read Me Updates --- solution/DeploymentV2/environments/readme.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/solution/DeploymentV2/environments/readme.md b/solution/DeploymentV2/environments/readme.md index 739cef77..a3383f10 100644 --- a/solution/DeploymentV2/environments/readme.md +++ b/solution/DeploymentV2/environments/readme.md @@ -1,7 +1,8 @@ -This folder contains the configuration settings and feature flags that determine what a specific ADS Go Fast deployment will be comprised of. +This folder contains the configuration settings and feature flags that determine what a specific ADS Go Fast deployment will be comprised of and into which Azure subscription it will be deployed. It contains two sub-folders which are discussed below: # Feature Templates Directory +This folder contains a file for each of our "Fast Start Templates". Fast Start Templates provide a currated and pre-defined list of "feature toggles" that can be applied to any deployment. The ADS Go Fast deployment has a number of feature toggles that allow various services and features to be turned on or off depending on the particular needs of a deployment. Rather than select these individually the "fast start templates" provide a quick and efficient way of deploying a pre-defined and often used combination of these feature toggles. The standard fast start templates are listed below. You can add your own by simply copying one of the existing files in this directory and renaming it. 
- full_deployment - basic_deployment @@ -10,6 +11,6 @@ This folder contains the configuration settings determine # Vars Directory -Contains a folder for each deployment environment. These folders control the number of "Analytics Landing Zones" that can be deployed by the repository. You can use these folders to allow for multiple environments that will support your SDLC (eg. Development, UAT, Production) and you can also use them to allow for multiple hub / spoke production deployments within a decentralised analytics architecture (Eg, EDW, Finance-Analytics, Operations-Analytics etc.). +This directory contains a sub-directory for each deployment environment. These folders control the number of "Analytics Landing Zones" that can be deployed by the repository. You can use these folders to allow for multiple environments that will support your SDLC (eg. Development, UAT, Production) and you can also use them to allow for multiple hub / spoke production deployments within a decentralised analytics architecture (Eg, EDW, Finance-Analytics, Operations-Analytics etc.). -Within each of these folders is a file called "common_vars_values.jsonc". This file contains all of the configuration settings specific to a single Analytics Lockbox deployment. +Within each of these folders is a file called "common_vars_values.jsonc". This file contains all of the configuration settings specific to a single Analytics Lockbox deployment. For a full explanation of each of the various settings refer to the json schema file titled "common_vars_schema.json". Visual Studio Code should provide intellisense and contextual help text based on this schema file. 
From ad9ade9165ad3094e9f122f8c3526907b59c8421 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 9 Aug 2022 10:42:24 +0800 Subject: [PATCH 113/151] added in --only-show-errors to prepare --- solution/DeploymentV2/Prepare.ps1 | 51 ++++++++++++++++--------------- 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index cd4207c9..549b227b 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -69,11 +69,11 @@ if ($gitDeploy) 'Microsoft.Sql') ForEach ($provider in $providers) { - az provider register --namespace $provider + az provider register --namespace $provider --only-show-errors } - az storage account create --resource-group $resourceGroupName --name $stateStorageName --sku Standard_LRS --allow-blob-public-access false --https-only true --min-tls-version TLS1_2 --public-network-access Disabled - az storage container create --name tstate --account-name $stateStorageName --auth-mode login + az storage account create --resource-group $resourceGroupName --name $stateStorageName --sku Standard_LRS --allow-blob-public-access false --https-only true --min-tls-version TLS1_2 --public-network-access Disabled --only-show-errors + az storage container create --name tstate --account-name $stateStorageName --auth-mode login --only-show-errors } else { @@ -111,7 +111,7 @@ else ForEach ($provider in $providers) { $progress += 5; - az provider register --namespace $provider + az provider register --namespace $provider --only-show-errors Write-Progress -Activity "Registering Azure Resource Providers" -Status "${progress}% Complete:" -PercentComplete $progress } } @@ -124,7 +124,7 @@ else $env:TF_VAR_subscription_id = $currentAccount.id $env:TF_VAR_ip_address = (Invoke-WebRequest ifconfig.me/ip).Content - $env:TF_VAR_domain = az account show --query 'user.name' | cut -d '@' -f 2 | sed 's/\"//' + $env:TF_VAR_domain = az account show --query 'user.name' 
--only-show-errors | cut -d '@' -f 2 | sed 's/\"//' #------------------------------------------------------------------------------------------------------------ # Create the resource group and terraform state store @@ -136,21 +136,21 @@ else if([string]::IsNullOrEmpty($env:TF_VAR_resource_group_name) -eq $false) { $progress = 0 Write-Progress -Activity "Creating Resource Group" -Status "${progress}% Complete:" -PercentComplete $progress - $rg = az group create -n $env:TF_VAR_resource_group_name -l australiaeast + $rg = az group create -n $env:TF_VAR_resource_group_name -l australiaeast --only-show-errors if([string]::IsNullOrEmpty($env:TF_VAR_storage_account_name) -eq $false) { $progress+=5 Write-Progress -Activity "Creating Storage Account" -Status "${progress}% Complete:" -PercentComplete $progress - $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_storage_account_name --sku Standard_LRS --allow-blob-public-access false --https-only true --min-tls-version TLS1_2 --query id -o tsv + $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_storage_account_name --sku Standard_LRS --allow-blob-public-access false --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors $progress+=5 - $userObjectId = az ad signed-in-user show --query id -o tsv + $userObjectId = az ad signed-in-user show --query id -o tsv --only-show-errors Write-Progress -Activity "Assigning Blob Contributor" -Status "${progress}% Complete:" -PercentComplete $progress - $assignment = az role assignment create --role "Storage Blob Data Contributor" --assignee-object-id $userObjectId --assignee-principal-type User + $assignment = az role assignment create --role "Storage Blob Data Contributor" --assignee-object-id $userObjectId --assignee-principal-type User --only-show-errors $progress+=5 Write-Progress -Activity "Creating State Container" -Status "${progress}% Complete:" 
-PercentComplete $progress - $container = az storage container create --name $CONTAINER_NAME --account-name $env:TF_VAR_storage_account_name --auth-mode login + $container = az storage container create --name $CONTAINER_NAME --account-name $env:TF_VAR_storage_account_name --auth-mode login --only-show-errors Write-Progress -Activity "Finished" -Completed } @@ -158,9 +158,17 @@ else } - $assigneeobject = Read-Host "Enter the object id of the AAD account that you would like to have ownership of the new resource group" + $assigneeobject = Read-Host "Enter the object id of the AAD account or Group that you would like to have ownership of the new resource group." + $sqlAdmin = Read-Host "Enter the object id of the AAD account or Group that you would like to have SQL AAD Admin on the Azure SQL Server instances created. Leave blank if this is an end-to-end interactive user deployment. Provide a security group or the deployment service principal if this is an agent deployment" + if([string]::IsNullOrEmpty($assigneeobject)) { + #Write-Host "Skipping Resource Group Ownership Assignment" + $assigneeobject = $currentAccount.id + } + + az role assignment create --role "Owner" --scope "/subscriptions/${env:TF_VAR_subscription_id}/resourcegroups/${env:TF_VAR_resource_group_name}" --assignee-object-id $assigneeobject + #------------------------------------------------------------------------------------------------------------ # Print pretty output for user #------------------------------------------------------------------------------------------------------------ @@ -232,19 +240,14 @@ else $common_vars_values.deployment_principal_layers1and3 = (az ad signed-in-user show | ConvertFrom-Json).id $foundUser = $false - foreach($u in $common_vars_values.synapse_administrators) - { - if ($u.(($u | Get-Member)[-1].Name) -eq ($common_vars_values.WEB_APP_ADMIN_USER)) - { - $foundUser = $true - break - } - } - if($foundUser -eq $true) - { - $userPrincipalName = (az ad signed-in-user show | 
ConvertFrom-Json).userPrincipalName - $common_vars_values.synapse_administrators.$userPrincipalName = (az ad signed-in-user show | ConvertFrom-Json).id - } + $common_vars_values.resource_owners = @("$assigneeobject") + + $common_vars_values.synapse_administrators = @{} + + $userPrincipalName = "sql_aad_admin" + $common_vars_values.synapse_administrators.$userPrincipalName = "" + $userPrincipalName = (az ad signed-in-user show --only-show-errors | ConvertFrom-Json).userPrincipalName + $common_vars_values.synapse_administrators.$userPrincipalName = (az ad signed-in-user show | ConvertFrom-Json).id $common_vars_values | Convertto-Json -Depth 10 | Set-Content ./environments/vars/$environmentName/common_vars_values.jsonc From 50080b36ce07b66c1a744bd308b220360cb2067f Mon Sep 17 00:00:00 2001 From: h-sha <97069267+h-sha@users.noreply.github.com> Date: Tue, 9 Aug 2022 12:44:38 +1000 Subject: [PATCH 114/151] purview private link naming fixed layer 3 --- .../vars/common_vars_template.jsonnet | 36 +++++ .../vars/staging/common_vars_values.jsonc | 6 +- .../DeploymentV2/terraform_layer3/layer2.tf | 4 +- .../DeploymentV2/terraform_layer3/locals.tf | 4 +- .../arm/privatelinks.json | 0 .../main.tf | 100 +++++++------- .../outputs.tf | 0 .../vars.tf | 124 +++++++++--------- .../DeploymentV2/terraform_layer3/purview.tf | 4 +- 9 files changed, 157 insertions(+), 121 deletions(-) rename solution/DeploymentV2/terraform_layer3/{ => modules}/purview_ingestion_private_endpoints/arm/privatelinks.json (100%) rename solution/DeploymentV2/terraform_layer3/{ => modules}/purview_ingestion_private_endpoints/main.tf (96%) rename solution/DeploymentV2/terraform_layer3/{ => modules}/purview_ingestion_private_endpoints/outputs.tf (100%) rename solution/DeploymentV2/terraform_layer3/{ => modules}/purview_ingestion_private_endpoints/vars.tf (96%) diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet 
index bdd53347..096a2291 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -734,6 +734,42 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index f2643484..e082466d 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -23,9 +23,9 @@ "synapse_contributors": {}, "WEB_APP_ADMIN_USER": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2", "WEB_APP_ADMIN_SECURITY_GROUP": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsp9cl", - "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-p9cl", - "ARM_DATALAKE_NAME": "adsstgdlsadsp9cladsl", + "ARM_SYNAPSE_WORKSPACE_NAME": "arkstgsynwadsp9cl", + "ARM_KEYVAULT_NAME": "ark-stg-kv-ads-p9cl", + "ARM_DATALAKE_NAME": "arkstgdlsadsp9cladsl", "ARM_PAL_PARTNER_ID": "0", "GIT_REPOSITORY_NAME": "testLockbox", "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "featuretest", diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index 5aacfbae..9d5f5e8c 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer2.tfstate" - resource_group_name = "gfuat" - storage_account_name = "gfuatstate" + resource_group_name = "lockboxfeaturetest" + storage_account_name = "lockboxfeatureteststate" } } diff --git a/solution/DeploymentV2/terraform_layer3/locals.tf b/solution/DeploymentV2/terraform_layer3/locals.tf index 80353201..30777200 100644 --- a/solution/DeploymentV2/terraform_layer3/locals.tf +++ 
b/solution/DeploymentV2/terraform_layer3/locals.tf @@ -6,8 +6,8 @@ locals { aad_webapp_name = (var.aad_webapp_name != "" ? var.aad_webapp_name : "ADS GoFast Web Portal (${var.environment_tag})") aad_functionapp_name = (var.aad_functionapp_name != "" ? var.aad_functionapp_name : "ADS GoFast Orchestration App (${var.environment_tag})") purview_name = data.terraform_remote_state.layer2.outputs.purview_name - purview_account_plink = (data.terraform_remote_state.layer2.outputs.purview_name != "" ? data.terraform_remote_state.layer2.outputs.purview_name : "${var.prefix}-${var.environment_tag}-pura-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") - purview_portal_plink = (data.terraform_remote_state.layer2.outputs.purview_name != "" ? data.terraform_remote_state.layer2.outputs.purview_name : "${var.prefix}-${var.environment_tag}-purp-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_account_plink = "${var.prefix}-${var.environment_tag}-pura-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}" + purview_portal_plink = "${var.prefix}-${var.environment_tag}-purp-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}" purview_resource_group_name = "managed-${module.naming.resource_group.name_unique}-purview" purview_ir_app_reg_name = data.terraform_remote_state.layer2.outputs.purview_sp_name private_dns_zone_servicebus_id = data.terraform_remote_state.layer2.outputs.private_dns_zone_servicebus_id diff --git a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/arm/privatelinks.json 
b/solution/DeploymentV2/terraform_layer3/modules/purview_ingestion_private_endpoints/arm/privatelinks.json similarity index 100% rename from solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/arm/privatelinks.json rename to solution/DeploymentV2/terraform_layer3/modules/purview_ingestion_private_endpoints/arm/privatelinks.json diff --git a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf b/solution/DeploymentV2/terraform_layer3/modules/purview_ingestion_private_endpoints/main.tf similarity index 96% rename from solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf rename to solution/DeploymentV2/terraform_layer3/modules/purview_ingestion_private_endpoints/main.tf index 55d1752d..9482f3c9 100644 --- a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/main.tf +++ b/solution/DeploymentV2/terraform_layer3/modules/purview_ingestion_private_endpoints/main.tf @@ -1,50 +1,50 @@ -resource "azurerm_resource_group_template_deployment" "ingestion_private_endpoints" { - name = "purview_ingestion_private_endpoints_${var.name_suffix}" - resource_group_name = var.resource_group_name - deployment_mode = "Incremental" - parameters_content = jsonencode({ - "purviewAccountName" = { - value = var.purview_account_name - }, - "subscriptionId" = { - value = var.subscription_id - }, - "location" = { - value = var.resource_location - }, - "queuePrivateLinkName" = { - value = var.queue_privatelink_name - }, - "storagePrivateLinkName" = { - value = var.storage_privatelink_name - }, - "eventHubPrivateLinkName" = { - value = var.eventhub_privatelink_name - }, - "subnetId" = { - value = var.subnet_id - }, - "managedResourceGroupName" = { - value = var.managed_resource_group_name - }, - "resourceGroupName" = { - value = var.resource_group_name - }, - "queueDnsId" = { - value = var.queue_private_dns_id - }, - "storageDnsId" = { - value = var.blob_private_dns_id - }, - "serviceBusDnsId" = 
{ - value = var.servicebus_private_dns_id - } - }) - lifecycle { - ignore_changes = [ - tags, - template_content - ] - } - template_content = file("${path.module}/arm/privatelinks.json") -} +resource "azurerm_resource_group_template_deployment" "ingestion_private_endpoints" { + name = "purview_ingestion_private_endpoints_${var.name_suffix}" + resource_group_name = var.resource_group_name + deployment_mode = "Incremental" + parameters_content = jsonencode({ + "purviewAccountName" = { + value = var.purview_account_name + }, + "subscriptionId" = { + value = var.subscription_id + }, + "location" = { + value = var.resource_location + }, + "queuePrivateLinkName" = { + value = var.queue_privatelink_name + }, + "storagePrivateLinkName" = { + value = var.storage_privatelink_name + }, + "eventHubPrivateLinkName" = { + value = var.eventhub_privatelink_name + }, + "subnetId" = { + value = var.subnet_id + }, + "managedResourceGroupName" = { + value = var.managed_resource_group_name + }, + "resourceGroupName" = { + value = var.resource_group_name + }, + "queueDnsId" = { + value = var.queue_private_dns_id + }, + "storageDnsId" = { + value = var.blob_private_dns_id + }, + "serviceBusDnsId" = { + value = var.servicebus_private_dns_id + } + }) + lifecycle { + ignore_changes = [ + tags, + template_content + ] + } + template_content = file("${path.module}/arm/privatelinks.json") +} diff --git a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/outputs.tf b/solution/DeploymentV2/terraform_layer3/modules/purview_ingestion_private_endpoints/outputs.tf similarity index 100% rename from solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/outputs.tf rename to solution/DeploymentV2/terraform_layer3/modules/purview_ingestion_private_endpoints/outputs.tf diff --git a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/vars.tf b/solution/DeploymentV2/terraform_layer3/modules/purview_ingestion_private_endpoints/vars.tf 
similarity index 96% rename from solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/vars.tf rename to solution/DeploymentV2/terraform_layer3/modules/purview_ingestion_private_endpoints/vars.tf index 59c213f7..d9201716 100644 --- a/solution/DeploymentV2/terraform_layer3/purview_ingestion_private_endpoints/vars.tf +++ b/solution/DeploymentV2/terraform_layer3/modules/purview_ingestion_private_endpoints/vars.tf @@ -1,62 +1,62 @@ -variable "resource_group_name" { - description = "The name of the resource group to deploy into" - type = string -} - -variable "purview_account_name" { - description = "The name of the data factory" - type = string -} -variable "resource_location" { - description = "The uri of the shared keyvault" - type = string -} - -variable "subscription_id" { - description = "The Id of the azure sub" - type = string -} - -variable "queue_privatelink_name" { - description = "The name of the queue private link" - type = string -} - -variable "storage_privatelink_name" { - description = "The name of the storage private link" - type = string -} - -variable "eventhub_privatelink_name" { - description = "The name of the eventhub private link" - type = string -} - -variable "queue_private_dns_id" { - description = "The id of the queue private DNS" - type = string -} - -variable "blob_private_dns_id" { - description = "The id of the queue private DNS" - type = string -} - -variable "servicebus_private_dns_id" { - description = "The id of the queue private DNS" - type = string -} - -variable "subnet_id" { - description = "The id of the subnet to attach the purview ingestion resources" - type = string -} -variable "managed_resource_group_name" { - description = "The name of the purview managed resource group" - type = string -} -variable "name_suffix" { - description = "Used to give resource group deployments unique names for an environment" - type = string -} - +variable "resource_group_name" { + description = "The name of the resource 
group to deploy into" + type = string +} + +variable "purview_account_name" { + description = "The name of the data factory" + type = string +} +variable "resource_location" { + description = "The uri of the shared keyvault" + type = string +} + +variable "subscription_id" { + description = "The Id of the azure sub" + type = string +} + +variable "queue_privatelink_name" { + description = "The name of the queue private link" + type = string +} + +variable "storage_privatelink_name" { + description = "The name of the storage private link" + type = string +} + +variable "eventhub_privatelink_name" { + description = "The name of the eventhub private link" + type = string +} + +variable "queue_private_dns_id" { + description = "The id of the queue private DNS" + type = string +} + +variable "blob_private_dns_id" { + description = "The id of the queue private DNS" + type = string +} + +variable "servicebus_private_dns_id" { + description = "The id of the queue private DNS" + type = string +} + +variable "subnet_id" { + description = "The id of the subnet to attach the purview ingestion resources" + type = string +} +variable "managed_resource_group_name" { + description = "The name of the purview managed resource group" + type = string +} +variable "name_suffix" { + description = "Used to give resource group deployments unique names for an environment" + type = string +} + diff --git a/solution/DeploymentV2/terraform_layer3/purview.tf b/solution/DeploymentV2/terraform_layer3/purview.tf index 177460e0..a230f44f 100644 --- a/solution/DeploymentV2/terraform_layer3/purview.tf +++ b/solution/DeploymentV2/terraform_layer3/purview.tf @@ -27,7 +27,7 @@ resource "azurerm_private_endpoint" "purview_account_private_endpoint_with_dns" tags = local.tags lifecycle { - ignore_changes = all + ignore_changes = [tags] } } @@ -63,7 +63,7 @@ resource "azurerm_private_endpoint" "purview_portal_private_endpoint_with_dns" { # Azure private endpoints module 
"purview_ingestion_private_endpoints" { - source = "./purview_ingestion_private_endpoints" + source = "./modules/purview_ingestion_private_endpoints" count = var.is_vnet_isolated && var.deploy_purview ? 1 : 0 resource_group_name = var.resource_group_name purview_account_name = data.terraform_remote_state.layer2.outputs.azurerm_purview_account_purview_name From 0f322c8c6abdb57d0e32ff5a8a9833a124a12a3d Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 9 Aug 2022 19:06:27 +0800 Subject: [PATCH 115/151] testing self hosted runenr --- .github/workflows/02.continuous-delivery-uat.yml | 6 +++--- .../environments/vars/common_vars_template.jsonnet | 10 ++++++++++ solution/DeploymentV2/terraform_layer3/layer2.tf | 4 ++-- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/.github/workflows/02.continuous-delivery-uat.yml b/.github/workflows/02.continuous-delivery-uat.yml index 7fbfbe66..2217a00d 100644 --- a/.github/workflows/02.continuous-delivery-uat.yml +++ b/.github/workflows/02.continuous-delivery-uat.yml @@ -7,7 +7,7 @@ on: jobs: deploy-to-env-one: - name: Deploy to Environment One + name: Deploy to Environment One concurrency: terraform environment: name: uat @@ -52,7 +52,7 @@ jobs: #TF_LOG : TRACE - runs-on: ubuntu-latest + runs-on: self-hosted steps: - name: PrintInfo run: | @@ -169,7 +169,7 @@ jobs: #PROD ENVIRONMENT #TF_LOG : TRACE - runs-on: ubuntu-latest + runs-on: self-hosted steps: - name: Checkout diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 096a2291..e7641db8 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -780,6 +780,16 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + + + + + + + diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index 9d5f5e8c..5aacfbae 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer2.tfstate" - resource_group_name = "lockboxfeaturetest" - storage_account_name = "lockboxfeatureteststate" + resource_group_name = "gfuat" + storage_account_name = "gfuatstate" } } From f084de02f3d50bf8acebe58fce3c3df69cdcfa44 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 9 Aug 2022 19:37:09 +0800 Subject: [PATCH 116/151] Disabled Az-Pwsh session --- .github/workflows/02.continuous-delivery-uat.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/02.continuous-delivery-uat.yml b/.github/workflows/02.continuous-delivery-uat.yml index 2217a00d..abf23b01 100644 --- a/.github/workflows/02.continuous-delivery-uat.yml +++ b/.github/workflows/02.continuous-delivery-uat.yml @@ -70,7 +70,7 @@ jobs: uses: azure/login@v1 with: creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' - enable-AzPSSession: true + enable-AzPSSession: false - name: Open Firewalls for Agent id: open_firewalls From bffca292b2757bbe88c30f706e94f60016877adb Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 9 Aug 2022 20:06:51 +0800 Subject: [PATCH 117/151] Fixed small bug in private link approvals --- solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 index 54a26033..46d515f2 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 +++ 
b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 @@ -20,7 +20,7 @@ function DeployPrivateLinks ( $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.keyvault_name --type Microsoft.Keyvault/vaults --description "Approved by Deploy.ps1" } } - $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $sqlserver_name --type 'Microsoft.Sql/servers' | ConvertFrom-Json + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.sqlserver_name --type 'Microsoft.Sql/servers' | ConvertFrom-Json foreach ($link in $links) { if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { $id_parts = $link.id.Split("/"); @@ -29,7 +29,7 @@ function DeployPrivateLinks ( } } - $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $synapse_workspace_name --type 'Microsoft.Synapse/workspaces' | ConvertFrom-Json + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.synapse_workspace_name --type 'Microsoft.Synapse/workspaces' | ConvertFrom-Json foreach ($link in $links) { if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { $id_parts = $link.id.Split("/"); From 6828b29b6e5491f0d9d3be245a8fa8e3bd44056d Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 9 Aug 2022 20:15:42 +0800 Subject: [PATCH 118/151] Another small fix to private links --- solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 index 46d515f2..31afb080 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 @@ -43,7 +43,7 @@ function DeployPrivateLinks ( if 
($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { $id_parts = $link.id.Split("/"); Write-Host "- " + $id_parts[$id_parts.length - 1] - $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $blobstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" + $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.blobstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" } } $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.adlsstorage_name --type 'Microsoft.Storage/storageAccounts' | ConvertFrom-Json From 764c94c98fb7ddf324523cae640b8778c50d5475 Mon Sep 17 00:00:00 2001 From: h-sha <97069267+h-sha@users.noreply.github.com> Date: Wed, 10 Aug 2022 08:39:37 +1000 Subject: [PATCH 119/151] lb --- .../DeploymentV2/environments/vars/common_vars_template.jsonnet | 2 ++ 1 file changed, 2 insertions(+) diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 096a2291..e756d458 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -787,6 +787,8 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + From 355d03b186f69a0ed7dd0bb46def4c4e1e572b96 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 11 Aug 2022 10:25:17 +0800 Subject: [PATCH 120/151] Fixes to prepare.ps1 based on new layered tf --- solution/DeploymentV2/Prepare.ps1 | 43 +++++++--- .../vars/PreprocessEnvironment.ps1 | 1 + .../vars/admz/common_vars_values.jsonc | 20 +++-- .../environments/vars/common_vars_schema.json | 4 +- .../vars/common_vars_template.jsonnet | 86 +++++++++++++++++++ .../vars/staging/common_vars_values.jsonc | 41 +++++---- .../pwshmodules/Deploy_0_Prep.psm1 | 18 ++-- .../DeploymentV2/terraform_layer2/layer1.tf | 4 +- .../terraform_layer3/03-deploy.ps1 | 22 +++-- .../DeploymentV2/terraform_layer3/layer2.tf | 4 +- 10 files changed, 179 insertions(+), 64 deletions(-) diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index 549b227b..56132aeb 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -161,13 +161,13 @@ else $assigneeobject = Read-Host "Enter the object id of the AAD account or Group that you would like to have ownership of the new resource group." $sqlAdmin = Read-Host "Enter the object id of the AAD account or Group that you would like to have SQL AAD Admin on the Azure SQL Server instances created. Leave blank if this is an end-to-end interactive user deployment. 
Provide a security group or the deployment service principal if this is an agent deployment" - if([string]::IsNullOrEmpty($assigneeobject)) { - #Write-Host "Skipping Resource Group Ownership Assignment" - $assigneeobject = $currentAccount.id + if([string]::IsNullOrEmpty($assigneeobject -eq $false)) { + #Write-Host "Skipping Resource Group Ownership Assignment" + $output = az role assignment create --role "Owner" --scope "/subscriptions/${env:TF_VAR_subscription_id}/resourcegroups/${env:TF_VAR_resource_group_name}" --assignee-object-id $assigneeobject --only-show-errors } - az role assignment create --role "Owner" --scope "/subscriptions/${env:TF_VAR_subscription_id}/resourcegroups/${env:TF_VAR_resource_group_name}" --assignee-object-id $assigneeobject + #------------------------------------------------------------------------------------------------------------ # Print pretty output for user @@ -234,21 +234,36 @@ else $common_vars_values.resource_group_name = $env:TF_VAR_resource_group_name $common_vars_values.domain = $env:TF_VAR_domain $common_vars_values.subscription_id = $env:TF_VAR_subscription_id + $common_vars_values.ip_address = $env:TF_VAR_ip_address $common_vars_values.ip_address2 = $env:TF_VAR_ip_address $common_vars_values.tenant_id = $env:TF_VAR_tenant_id - $common_vars_values.WEB_APP_ADMIN_USER = (az ad signed-in-user show | ConvertFrom-Json).id - $common_vars_values.deployment_principal_layers1and3 = (az ad signed-in-user show | ConvertFrom-Json).id + $common_vars_values.WEB_APP_ADMIN_USER = (az ad signed-in-user show --only-show-errors | ConvertFrom-Json).id + $foundUser = $false + $common_vars_values.resource_owners = @() + $common_vars_values.synapse_administrators = @{} - $common_vars_values.resource_owners = @("$assigneeobject") + if([string]::IsNullOrEmpty($assigneeobject) -eq $false) + { + $common_vars_values.deployment_principal_layers1and3 = $assigneeobject + $userPrincipalName = (az ad signed-in-user show --only-show-errors | 
ConvertFrom-Json).userPrincipalName + $common_vars_values.synapse_administrators.$userPrincipalName = (az ad signed-in-user show --only-show-errors | ConvertFrom-Json).id + } + else + { + $owner = (az ad signed-in-user show | ConvertFrom-Json).id + $common_vars_values.resource_owners = @("$owner") + $common_vars_values.deployment_principal_layers1and3 = "" + #$assigneeobject = ((az ad user show --id $currentAccount.user.name) | ConvertFrom-Json -Depth 10).id + } + + if([string]::IsNullOrEmpty($sqlAdmin) -eq $false) + { + $common_vars_values.azure_sql_aad_administrators = @{} + $userPrincipalName = "sql_aad_admin" + $common_vars_values.azure_sql_aad_administrators.$userPrincipalName = $sqlAdmin + } - $common_vars_values.synapse_administrators = @{} - - $userPrincipalName = "sql_aad_admin" - $common_vars_values.synapse_administrators.$userPrincipalName = "" - $userPrincipalName = (az ad signed-in-user show --only-show-errors | ConvertFrom-Json).userPrincipalName - $common_vars_values.synapse_administrators.$userPrincipalName = (az ad signed-in-user show | ConvertFrom-Json).id - $common_vars_values | Convertto-Json -Depth 10 | Set-Content ./environments/vars/$environmentName/common_vars_values.jsonc diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 1b3d213d..63346364 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -89,6 +89,7 @@ foreach($fto in $fto_keys) Write-Warning "Overriding Feature Template value for $ev_prop with value of $Value" } else { + $ev_prop = "TF_VAR_$Name" Write-Warning "Inserting Feature Template value for $ev_prop with value of $Value" $Value = $fto_vals.$Name } diff --git a/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc index 85927813..2962ba5f 
100644 --- a/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc @@ -6,17 +6,19 @@ "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", - "resource_group_name": "gft2", + "resource_group_name": "gft1", "ip_address": "144.138.148.220", "ip_address2": "144.138.148.220", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", - "synapse_administrators": - {"Jorampon":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578"}, - "WEB_APP_ADMIN_USER":"ccbdbba4-669c-48d6-86b8-75c9ab2ee578", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment - "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl", //Required by deployment script (pwsh) to openfirewall and provide access.. 
note that this will not be known until first deployment + "synapse_administrators": { + "jorampon@microsoft.com": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "sql_aad_admin": "" + }, + "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsdevsynwadskhpv", + "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", + "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl", "FeatureTemplateOverrides": { - "is_onprem_datafactory_ir_registered":false + "is_onprem_datafactory_ir_registered": false } -} \ No newline at end of file +} diff --git a/solution/DeploymentV2/environments/vars/common_vars_schema.json b/solution/DeploymentV2/environments/vars/common_vars_schema.json index f3a3ce37..d22b9a73 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_schema.json +++ b/solution/DeploymentV2/environments/vars/common_vars_schema.json @@ -94,7 +94,7 @@ "ip_address": { "type": "string", "examples": [], - "description": "", + "description": "Primary Deployment IP Address. This is usually the IP of the CICD agent. If deploying locally set it to your IPAddress. The agent will update this dynamically in the CICD workflow.", "options": { "category": "Target Azure Environment" } @@ -102,7 +102,7 @@ "ip_address2": { "type": "string", "examples": [], - "description": "", + "description": "Secondary Deployment IP Address. If deploying locally set it to your IPAddress. If Layers 1 & 3 are deploying from an IP Address that is different to the primary IP then put the layers 1 & 3 IP addess here.", "options": { "category": "Target Azure Environment" } diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index e7641db8..75923876 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -780,6 +780,92 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index e082466d..e8fe4b95 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -1,31 +1,30 @@ { "$schema": "./../common_vars_schema.json", - "owner_tag": "Arkahna", + "owner_tag": "Contoso", "resource_location": "australiaeast", "environment_tag": "stg", - "domain": "arkahna.io", - "subscription_id": "687fe1ae-a520-4f86-b921-a80664c40f9b", - "prefix": "ark", - "resource_group_name": "lockboxfeaturetest", - "ip_address": "159.196.32.59", - "ip_address2": "", - "tenant_id": "0fee3d31-b963-4a1c-8f4a-ca367205aa65", + "domain": "microsoft.com", + "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", + "prefix": "ads", + "resource_group_name": "gfth", + "ip_address": "144.138.148.220", + "ip_address2": "144.138.148.220", + "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", "deployment_principal_layers1and3": "", "resource_owners": [ - "ed0568a1-1ee0-46ee-b7e8-0540c4660de2" + "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" ], - "synapse_administrators": { - }, + "synapse_administrators": {}, "azure_sql_aad_administrators": { - "sql_aad_admin": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2" + "sql_aad_admin": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "synapse_publishers": {}, "synapse_contributors": {}, - "WEB_APP_ADMIN_USER": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2", + "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "WEB_APP_ADMIN_SECURITY_GROUP": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "arkstgsynwadsp9cl", - "ARM_KEYVAULT_NAME": "ark-stg-kv-ads-p9cl", - "ARM_DATALAKE_NAME": "arkstgdlsadsp9cladsl", + "ARM_SYNAPSE_WORKSPACE_NAME": 
"adsstgsynwadsxu3g", + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-xu3g", + "ARM_DATALAKE_NAME": "adsstgdlsadsxu3gadsl", "ARM_PAL_PARTNER_ID": "0", "GIT_REPOSITORY_NAME": "testLockbox", "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "featuretest", @@ -36,14 +35,14 @@ "GIT_ADF_USER_NAME": "#####", "GIT_ADF_EMAIL_ADDRESS": "#####", "FeatureTemplateOverrides": { - "is_onprem_datafactory_ir_registered":false, - "synapse_git_toggle":true, - "adf_git_toggle":false, + "is_onprem_datafactory_ir_registered": false, + "synapse_git_toggle": false, + "adf_git_toggle": false, "deploy_sentinel": false, "publish_sif_database": false, - "synapse_git_toggle_integration": true, + "synapse_git_toggle_integration": false, "synapse_git_repository_owner": "h-sha", - "synapse_git_repository_name":"testLockbox", + "synapse_git_repository_name": "testLockbox", "synapse_git_repository_root_folder": "/Synapse", "synapse_git_use_pat": false } diff --git a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 index ee5c9a91..5162606f 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 @@ -90,10 +90,11 @@ function PrepareDeployment ( if ($env:TF_VAR_ip_address -ne "") { try { - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address - $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 - $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name 
$env:datalakeName --ip-address $env:TF_VAR_ip_address + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address --only-show-errors + $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 --only-show-errors + $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME --only-show-errors + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address --only-show-errors + $hiddenoutput =az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address --only-show-errors } catch { Write-Warning 'Opening Firewalls for IP Address One Failed' @@ -103,10 +104,11 @@ function PrepareDeployment ( if ($env:TF_VAR_ip_address2 -ne "") { try { - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 - $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 - $hiddenoutput = az synapse workspace firewall-rule create --name CICDUser --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name 
$env:datalakeName --ip-address $env:TF_VAR_ip_address2 + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 --only-show-errors + $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 --only-show-errors + $hiddenoutput = az synapse workspace firewall-rule create --name CICDUser --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME --only-show-errors + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address2 --only-show-errors + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 --only-show-errors } catch { Write-Warning 'Opening Firewalls for IP Address Two Failed' diff --git a/solution/DeploymentV2/terraform_layer2/layer1.tf b/solution/DeploymentV2/terraform_layer2/layer1.tf index 13be1709..a938ebce 100644 --- a/solution/DeploymentV2/terraform_layer2/layer1.tf +++ b/solution/DeploymentV2/terraform_layer2/layer1.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer1.tfstate" - resource_group_name = "lockboxfeaturetest" - storage_account_name = "lockboxfeatureteststate" + resource_group_name = "gfth" + storage_account_name = "gfthstate" } } diff --git a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 index 6699588c..ac9999a2 100644 --- a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 @@ -39,9 +39,19 @@ $ipaddress2 = 
$env:TF_VAR_ip_address2 PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo -#------------------------------------------------------------------------------------------------------------ -# Main Terraform - Layer1 -#------------------------------------------------------------------------------------------------------------ -Write-Host "Starting Terraform Deployment- Layer 3" -terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure -terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl + +#Check to make sure that purview account is fully deployed +$pname = ((az storage blob download -c "tstate" -n "terraform_layer2.tfstate" --account-name $env:TF_VAR_state_storage_account_name --auth-mode login) | ConvertFrom-Json).outputs.purview_name +$pstate = (az purview account show --name $pname.value -g $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10).provisioningState +if($pstate -ne "Succeeded") +{ + Write-Error "Purview account has not yet completed provisioning - Wait For completion and then retry" +} +else { + #------------------------------------------------------------------------------------------------------------ + # Main Terraform - Layer1 + #------------------------------------------------------------------------------------------------------------ + Write-Host "Starting Terraform Deployment- Layer 3" + terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure + terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl +} diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index 5aacfbae..a20c9e0b 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = 
"tstate" key = "terraform_layer2.tfstate" - resource_group_name = "gfuat" - storage_account_name = "gfuatstate" + resource_group_name = "gfth" + storage_account_name = "gfthstate" } } From 4cd907738449363265a0127c3662d9196ef1a65f Mon Sep 17 00:00:00 2001 From: John Rampono Date: Fri, 12 Aug 2022 10:58:54 +0800 Subject: [PATCH 121/151] Final fixes for layered terraform. Added terraform output summaries --- README.md | 2 +- solution/DeploymentV2/Deploy.ps1 | 153 ++---------------- solution/DeploymentV2/Prepare.ps1 | 20 ++- .../vars/admz/common_vars_values.jsonc | 3 +- .../vars/common_vars_template.jsonnet | 128 +++++++++++++++ .../vars/local/common_vars_values.jsonc | 3 +- .../vars/staging/common_vars_values.jsonc | 11 +- .../vars/uat/common_vars_values.jsonc | 3 +- .../terraform_layer1/01-deploy.ps1 | 19 ++- .../terraform_layer2/02-deploy.ps1 | 31 +++- .../terraform_layer2/02-publish.ps1 | 41 ++++- .../DeploymentV2/terraform_layer2/layer1.tf | 4 +- .../DeploymentV2/terraform_layer2/synapse.tf | 2 +- .../terraform_layer3/03-deploy.ps1 | 3 + .../terraform_layer3/03-publish.ps1 | 10 +- .../DeploymentV2/terraform_layer3/layer2.tf | 4 +- .../DeploymentV2/terraform_layer3/purview.tf | 4 + 17 files changed, 274 insertions(+), 167 deletions(-) diff --git a/README.md b/README.md index 31e9a3b2..cd1ac157 100644 --- a/README.md +++ b/README.md @@ -56,7 +56,7 @@ Once you have set up these pre-requisites you will then need to [Clone](https:// :triangular_flag_on_post: If you want a stable deployment it is highly recommended that you checkout one of the official release tags. For example, if you wish to deploy v1.0.2 run the line below from within the directory into which you cloned the repository. 
```bash -git checkout tags/v1.0.2 +git checkout tags/v1.0.4 ``` ## Deployment diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 604979e6..793904f6 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -20,155 +20,26 @@ # # You can run this script multiple times if needed. # -# ./Deploy.ps1 -RunTerraformLayer1 $true -FeatureTemplate "basic_deployment" -PerformPostIACPublishing $false -# ./Deploy.ps1 -RunTerraformLayer2 $true -FeatureTemplate "basic_deployment" -PerformPostIACPublishing $true -# ./Deploy.ps1 -RunTerraformLayer3 $true -FeatureTemplate "basic_deployment" -$PublishSQLLogins $true -# #---------------------------------------------------------------------------------------------------------------- -param ( - [Parameter(Mandatory=$false)] - [bool]$RunTerraformLayer1=0, - [Parameter(Mandatory=$false)] - [bool]$RunTerraformLayer2=0, - [Parameter(Mandatory=$false)] - [bool]$RunTerraformLayer3=0, - [Parameter(Mandatory=$false)] - [bool]$PublishMetadataDatabase=$true, - [Parameter(Mandatory=$false)] - [bool]$PublishSQLLogins=0, - [Parameter(Mandatory=$false)] - [bool]$PerformPostIACPublishing=0, - [Parameter(Mandatory=$false)] - [string]$FeatureTemplate="basic_deployment" -) - -#------------------------------------------------------------------------------------------------------------ -# Module Imports #Mandatory -#------------------------------------------------------------------------------------------------------------ -import-Module ./pwshmodules/GatherOutputsFromTerraform.psm1 -force -import-Module ./pwshmodules/Deploy_0_Prep.psm1 -force -#------------------------------------------------------------------------------------------------------------ -# Preparation #Mandatory -#------------------------------------------------------------------------------------------------------------ +$PathToReturnTo = (Get-Location).Path $deploymentFolderPath = (Get-Location).Path -$gitDeploy = 
([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') -$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') -$ipaddress = $env:TF_VAR_ip_address -$ipaddress2 = $env:TF_VAR_ip_address2 -PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate +Set-Location ./terraform_layer1 +./01-deploy.ps1 -#------------------------------------------------------------------------------------------------------------ -# Main Terraform -#------------------------------------------------------------------------------------------------------------ -./Deploy_1_Infra0.ps1 -RunTerraformLayer1 $RunTerraformLayer1 -RunTerraformLayer2 $RunTerraformLayer2 -RunTerraformLayer3 $RunTerraformLayer3 -skipTerraformDeployment $skipTerraformDeployment - - -#------------------------------------------------------------------------------------------------------------ -# Get all the outputs from terraform so we can use them in subsequent steps #Mandatory -#------------------------------------------------------------------------------------------------------------ -Set-Location "./terraform_layer2" -Write-Host "Reading Terraform Outputs" -#Run Init Just in Case we skipped the Infra Section -#$init = terragrunt init --terragrunt-config vars/$environmentName/terragrunt.hcl -reconfigure -$tout = GatherOutputsFromTerraform -TerraformFolderPath "./" -$outputs = terragrunt output -json --terragrunt-config ./vars/$env:environmentName/terragrunt.hcl | ConvertFrom-Json -$subscription_id =$outputs.subscription_id.value -$resource_group_name =$outputs.resource_group_name.value -$webapp_name =$outputs.webapp_name.value -$functionapp_name=$outputs.functionapp_name.value -$purview_name=$outputs.purview_name.value -$sqlserver_name=$outputs.sqlserver_name.value -$blobstorage_name=$outputs.blobstorage_name.value -$adlsstorage_name=$outputs.adlsstorage_name.value 
-$datafactory_name=$outputs.datafactory_name.value -$keyvault_name=$outputs.keyvault_name.value -#sif database name -$sifdb_name = if([string]::IsNullOrEmpty($outputs.sifdb_name.value)){"SIFDM"} -$stagingdb_name=$outputs.stagingdb_name.value -$sampledb_name=$outputs.sampledb_name.value -$metadatadb_name=$outputs.metadatadb_name.value -$loganalyticsworkspace_id=$outputs.loganalyticsworkspace_id.value -$purview_sp_name=$outputs.purview_sp_name.value -$synapse_workspace_name=if([string]::IsNullOrEmpty($outputs.synapse_workspace_name.value)) {"Dummy"} else {$outputs.synapse_workspace_name.value} -$synapse_sql_pool_name=if([string]::IsNullOrEmpty($outputs.synapse_sql_pool_name.value)) {"Dummy"} else {$outputs.synapse_sql_pool_name.value} -$synapse_spark_pool_name=if([string]::IsNullOrEmpty($outputs.synapse_spark_pool_name.value)) {"Dummy"} else {$outputs.synapse_spark_pool_name.value} -$skipCustomTerraform = if($tout.deploy_custom_terraform) {$false} else {$true} -$skipWebApp = if($tout.publish_web_app -and $tout.deploy_web_app) {$false} else {$true} -$skipFunctionApp = if($tout.publish_function_app -and $tout.deploy_function_app) {$false} else {$true} -$skipDatabase = if($tout.publish_metadata_database -and $tout.deploy_metadata_database) {$false} else {$true} -$skipSQLLogins = if($tout.publish_sql_logins -and $tout.deploy_sql_server) {$false} else {$true} -$skipSynapseLogins = if($tout.publish_sql_logins -and $tout.deploy_synapse) {$false} else {$true} -$skipSampleFiles = if($tout.publish_sample_files){$false} else {$true} -$skipSIF= if($tout.publish_sif_database){$false} else {$true} -$skipNetworking = if($tout.configure_networking){$false} else {$true} -$skipDataFactoryPipelines = if($tout.publish_datafactory_pipelines) {$false} else {$true} -$skipFunctionalTests = if($tout.publish_functional_tests) {$false} else {$true} -$skipConfigurePurview = if($tout.publish_configure_purview) {$false} else {$true} -$AddCurrentUserAsWebAppAdmin = 
if($tout.publish_web_app_addcurrentuserasadmin) {$true} else {$false} Set-Location $deploymentFolderPath +Set-Location ./terraform_layer2 +./02-deploy.ps1 +Set-Location $deploymentFolderPath +Set-Location ./terraform_layer3 +./03-deploy.ps1 +./03-publish.ps1 -#------------------------------------------------------------------------------------------------------------ -# Run Each SubModule -#------------------------------------------------------------------------------------------------------------ -./Deploy_3_Infra1.ps1 -deploymentFolderPath $deploymentFolderPath -skipTerraformDeployment $skipTerraformDeployment -skipCustomTerraform $skipCustomTerraform - -#------------------------------------------------------------------------------------------------------------ -# SQL Deployment and Users -# In order for a deployment agent service principal to execute the two scripts below you need to give directory read to the Azure SQL Instance Managed Identity and the Synapse Managed Identity -#------------------------------------------------------------------------------------------------------------ -./Deploy_8_SQLLogins.ps1 -PublishSQLLogins $PublishSQLLogins - -#------------------------------------------------------------------------------------------------------------ -# Data Factory & Synapse Artefacts and Samplefiles -#------------------------------------------------------------------------------------------------------------ - -if($PerformPostIACPublishing -eq $false) { - Write-Host "Skipping Post IAC Publishing" -} -else { - ./Deploy_4_PrivateLinks.ps1 - ./Deploy_5_WebApp.ps1 - ./Deploy_6_FuncApp.ps1 - ./Deploy_7_MetadataDB.ps1 -publish_metadata_database $PublishMetadataDatabase - ./Deploy_9_DataFactory.ps1 - ./Deploy_10_SampleFiles.ps1 -} - - -#---------------------------------------------------------------------------------------------------------------- -# Set up Purview 
-#---------------------------------------------------------------------------------------------------------------- -# This is a WIP - not recommended to use for standard user -#---------------------------------------------------------------------------------------------------------------- -# -if($skipConfigurePurview) { - Write-Host "Skipping experimental Purview Configuration" -} -else { - Write-Host "Running Purview Configuration (experimental) Script" - Set-Location $deploymentFolderPath - Invoke-Expression ./ConfigureAzurePurview.ps1 -} - - -#---------------------------------------------------------------------------------------------------------------- -# Deploy Functional Tests -#---------------------------------------------------------------------------------------------------------------- -# This is for development purposes primarily - If using, understand these may not be all working with most recent platform version as tests can become outdated / missing new required fields. -#---------------------------------------------------------------------------------------------------------------- -if($skipFunctionalTests) { - Write-Host "Skipping Functional Tests Upload" -} -else { - Write-Host "Deploying Functional Tests to Web App" - Set-Location $deploymentFolderPath - Invoke-Expression ./GenerateAndUploadFunctionalTests.ps1 -} - - +Set-Location $deploymentFolderPath +Set-Location ./terraform_layer2 +./02-publish.ps1 Set-Location $deploymentFolderPath Write-Host "Finished" \ No newline at end of file diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index 56132aeb..e0f0292d 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -159,7 +159,7 @@ else $assigneeobject = Read-Host "Enter the object id of the AAD account or Group that you would like to have ownership of the new resource group." 
- $sqlAdmin = Read-Host "Enter the object id of the AAD account or Group that you would like to have SQL AAD Admin on the Azure SQL Server instances created. Leave blank if this is an end-to-end interactive user deployment. Provide a security group or the deployment service principal if this is an agent deployment" + $sqlAdmin = Read-Host "Enter the object id of the AAD account or Group that you would like to have SQL AAD Admin on the Azure SQL Server instances created." if([string]::IsNullOrEmpty($assigneeobject -eq $false)) { #Write-Host "Skipping Resource Group Ownership Assignment" @@ -205,10 +205,7 @@ else Write-Host " - domain = " -NoNewline -ForegroundColor green Write-Host "${env:TF_VAR_domain}"; Write-Host " "; - Write-Host "NOTE: It is recommended you copy these into your terraform/vars/local/terragrunt.hcl file for future use" -ForegroundColor blue - Write-Host " " - Write-Host "If you are creating a local development instance only, you can run ./Deploy.ps1 now" -ForegroundColor green - Write-Host " " + Write-Host "NOTE: It is recommended you copy these into your environment/vars/local/common_vars_values.jsonc file for future use" -ForegroundColor blue Write-Host "Press any key to continue..."; #------------------------------------------------------------------------------------------------------------ # Pause incase this was run directly @@ -263,6 +260,15 @@ else $userPrincipalName = "sql_aad_admin" $common_vars_values.azure_sql_aad_administrators.$userPrincipalName = $sqlAdmin } + + + $ResetFlags = Get-SelectionFromUser -Options ('Yes','No') -Prompt "Reset flags for is_onprem_datafactory_ir_registered and deployment_layer3_complete. For brand new deployment select 'Yes'." 
+ if ($ResetFlags -eq "Yes") + { + $common_vars_values.FeatureTemplateOverrides.is_onprem_datafactory_ir_registered = $false + $common_vars_values.FeatureTemplateOverrides.deployment_layer3_complete = $false + } + $common_vars_values | Convertto-Json -Depth 10 | Set-Content ./environments/vars/$environmentName/common_vars_values.jsonc @@ -288,7 +294,9 @@ else } } - +Write-Host "Prepare Complete...." +Write-Host "If you are creating a local development instance only, you can run ./Deploy.ps1 now" -ForegroundColor green +Write-Host " " Set-Location $deploymentFolderPath diff --git a/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc index 2962ba5f..a51fe840 100644 --- a/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/admz/common_vars_values.jsonc @@ -19,6 +19,7 @@ "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl", "FeatureTemplateOverrides": { - "is_onprem_datafactory_ir_registered": false + "is_onprem_datafactory_ir_registered": false, + "deployment_layer3_complete": false } } diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index c7a650be..7bda3cc4 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -396,3 +396,131 @@ local SecretFileSensitiveVars = { // Object comprehension. 
"ForSecretFile": SecretFileVars, "ForSecretFileSensitive": SecretFileSensitiveVars } + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc index 85927813..d8058733 100644 --- a/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/local/common_vars_values.jsonc @@ -17,6 +17,7 @@ "ARM_KEYVAULT_NAME": "ads-dev-kv-ads-khpv", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment "ARM_DATALAKE_NAME": "adsdevdlsadskhpvadsl", //Required by deployment script (pwsh) to openfirewall and provide access.. note that this will not be known until first deployment "FeatureTemplateOverrides": { - "is_onprem_datafactory_ir_registered":false + "is_onprem_datafactory_ir_registered": false, + "deployment_layer3_complete": false } } \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 03b9c526..82f8ae23 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -6,7 +6,7 @@ "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", - "resource_group_name": "gfth", + "resource_group_name": "gfh5", "ip_address": "144.138.148.220", "ip_address2": "144.138.148.220", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", @@ -19,15 +19,15 @@ "sql_aad_admin": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, 
"azure_purview_data_curators": { - "hugo": "ed0568a1-1ee0-46ee-b7e8-0540c4660de2" + "john": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578" }, "synapse_publishers": {}, "synapse_contributors": {}, "WEB_APP_ADMIN_USER": "ccbdbba4-669c-48d6-86b8-75c9ab2ee578", "WEB_APP_ADMIN_SECURITY_GROUP": "#####", - "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadsxu3g", - "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-xu3g", - "ARM_DATALAKE_NAME": "adsstgdlsadsxu3gadsl", + "ARM_SYNAPSE_WORKSPACE_NAME": "adsstgsynwadss1zi", + "ARM_KEYVAULT_NAME": "ads-stg-kv-ads-s1zi", + "ARM_DATALAKE_NAME": "adsstgdlsadss1ziadsl", "ARM_PAL_PARTNER_ID": "0", "GIT_REPOSITORY_NAME": "testLockbox", "GIT_SYNAPSE_REPOSITORY_BRANCH_NAME": "featuretest", @@ -39,6 +39,7 @@ "GIT_ADF_EMAIL_ADDRESS": "#####", "FeatureTemplateOverrides": { "is_onprem_datafactory_ir_registered": false, + "deployment_layer3_complete": true, "synapse_git_toggle": false, "adf_git_toggle": false, "deploy_sentinel": false, diff --git a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc index a51f5569..3b1b485c 100644 --- a/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/uat/common_vars_values.jsonc @@ -40,6 +40,7 @@ "GIT_ADF_USER_NAME": "#####", "GIT_ADF_EMAIL_ADDRESS": "#####", "FeatureTemplateOverrides": { - "is_onprem_datafactory_ir_registered":true + "is_onprem_datafactory_ir_registered": false, + "deployment_layer3_complete": false } } diff --git a/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 b/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 index a7641c99..e919b749 100644 --- a/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 @@ -43,5 +43,20 @@ PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderP # Main Terraform - Layer1 
#------------------------------------------------------------------------------------------------------------ Write-Host "Starting Terraform Deployment- Layer 1" -terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure -terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl +$output = terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure +$output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json + +$warnings = ($output | ConvertFrom-Json -Depth 20) | Where-Object {$_."@level" -eq "warn"} +$errors = ($output | ConvertFrom-Json -Depth 20) | Where-Object {$_."@level" -eq "error"} +if($warnings.count -gt 0) +{ + Write-Host "---------------------Terraform Warnings-----------------------------------------------------------" + foreach($o in $warnings) {Write-Warning ($o."@message" + "; Address:" + $o.diagnostic.address + "; Detail:" + $o.diagnostic.detail)} + Write-Host "--------------------------------------------------------------------------------------------------" +} +if($errors.count -gt 0) +{ + Write-Host "---------------------Terraform Errors-------------------------------------------------------------" + foreach($o in $errors) {Write-Error ($o."@message" + "; Address:" + $o.diagnostic.address + "; Detail:" + $o.diagnostic.detail)} + Write-Host "--------------------------------------------------------------------------------------------------" +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 index ff7c6f1f..8ca06641 100644 --- a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 @@ -43,5 +43,32 @@ PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderP # Main Terraform - Layer1 
#------------------------------------------------------------------------------------------------------------ Write-Host "Starting Terraform Deployment- Layer 2" -terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure -terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl +$output = terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure +$output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json #-var synapse_sql_password=$env:TF_VAR_synapse_sql_password + +$warnings = ($output | ConvertFrom-Json -Depth 20) | Where-Object {$_."@level" -eq "warn"} +$errors = ($output | ConvertFrom-Json -Depth 20) | Where-Object {$_."@level" -eq "error"} +if($warnings.count -gt 0) +{ + Write-Host "---------------------Terraform Warnings-----------------------------------------------------------" + foreach($o in $warnings) {Write-Warning ($o."@message" + "; Address:" + $o.diagnostic.address + "; Detail:" + $o.diagnostic.detail)} + Write-Host "--------------------------------------------------------------------------------------------------" +} +if($errors.count -gt 0) +{ + Write-Host "---------------------Terraform Errors-------------------------------------------------------------" + foreach($o in $errors) {Write-Error ($o."@message" + "; Address:" + $o.diagnostic.address + "; Detail:" + $o.diagnostic.detail)} + Write-Host "--------------------------------------------------------------------------------------------------" +} + +#Update Values for variables in Environment +$tout_raw = ((az storage blob download -c "tstate" -n "terraform_layer2.tfstate" --account-name $env:TF_VAR_state_storage_account_name --auth-mode login) | ConvertFrom-Json).outputs + +$envFolderPath = Convert-Path -Path ($deploymentFolderPath + "./environments/vars/$env:environmentName/") +$varsfile = $envFolderPath + "/common_vars_values.jsonc" 
+$common_vars_values = Get-Content $varsfile | ConvertFrom-Json -Depth 10 +$common_vars_values.ARM_DATALAKE_NAME = $tout_raw.adlsstorage_name.value +$common_vars_values.ARM_KEYVAULT_NAME = $tout_raw.keyvault_name.value +$common_vars_values.ARM_SYNAPSE_WORKSPACE_NAME = $tout_raw.synapse_workspace_name.value +$common_vars_values | Convertto-Json -Depth 10 | Set-Content $varsfile + diff --git a/solution/DeploymentV2/terraform_layer2/02-publish.ps1 b/solution/DeploymentV2/terraform_layer2/02-publish.ps1 index 962ac71c..68cf6307 100644 --- a/solution/DeploymentV2/terraform_layer2/02-publish.ps1 +++ b/solution/DeploymentV2/terraform_layer2/02-publish.ps1 @@ -22,6 +22,8 @@ param ( [string]$FeatureTemplate="" ) + + #------------------------------------------------------------------------------------------------------------ # Module Imports #Mandatory #------------------------------------------------------------------------------------------------------------ @@ -40,6 +42,12 @@ $ipaddress2 = $env:TF_VAR_ip_address2 PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo +if($env:TF_VAR_deployment_layer3_complete -eq $false -or $null -eq $env:TF_VAR_deployment_layer3_complete) +{ + Write-Error "Layer 3 Deployment is not complete. Code will now exit. Run terraform layer 3 for this deployment before running this layer (layer two) again." 
+ exit +} + #------------------------------------------------------------------------------------------------------------ # Get Outputs #Mandatory #------------------------------------------------------------------------------------------------------------ @@ -67,4 +75,35 @@ import-Module ./../pwshmodules/Deploy_10_SampleFiles.psm1 -force DeploySampleFiles -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo #import-Module ./../pwshmodules/ConfigureAzurePurview.psm1 -force -#ConfigureAzurePurview -tout $tout \ No newline at end of file +#ConfigureAzurePurview -tout $tout + + +#---------------------------------------------------------------------------------------------------------------- +# Set up Purview +#---------------------------------------------------------------------------------------------------------------- +# This is a WIP - not recommended to use for standard user +#---------------------------------------------------------------------------------------------------------------- +# +if($skipConfigurePurview -or $null -eq $skipConfigurePurview) { + Write-Host "Skipping experimental Purview Configuration" +} +else { + Write-Host "Running Purview Configuration (experimental) Script" + Set-Location $deploymentFolderPath + Invoke-Expression ./ConfigureAzurePurview.ps1 +} + + +#---------------------------------------------------------------------------------------------------------------- +# Deploy Functional Tests +#---------------------------------------------------------------------------------------------------------------- +# This is for development purposes primarily - If using, understand these may not be all working with most recent platform version as tests can become outdated / missing new required fields. 
+#---------------------------------------------------------------------------------------------------------------- +if($skipFunctionalTests -or $null -eq $skipFunctionalTests) { + Write-Host "Skipping Functional Tests Upload" +} +else { + Write-Host "Deploying Functional Tests to Web App" + Set-Location $deploymentFolderPath + Invoke-Expression ./GenerateAndUploadFunctionalTests.ps1 +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer2/layer1.tf b/solution/DeploymentV2/terraform_layer2/layer1.tf index a938ebce..acd8c874 100644 --- a/solution/DeploymentV2/terraform_layer2/layer1.tf +++ b/solution/DeploymentV2/terraform_layer2/layer1.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer1.tfstate" - resource_group_name = "gfth" - storage_account_name = "gfthstate" + resource_group_name = "gfh5" + storage_account_name = "gfh5state" } } diff --git a/solution/DeploymentV2/terraform_layer2/synapse.tf b/solution/DeploymentV2/terraform_layer2/synapse.tf index 17931aa6..0fe37956 100644 --- a/solution/DeploymentV2/terraform_layer2/synapse.tf +++ b/solution/DeploymentV2/terraform_layer2/synapse.tf @@ -148,7 +148,7 @@ resource "azurerm_synapse_firewall_rule" "public_access" { } resource "time_sleep" "azurerm_synapse_firewall_rule_wait_30_seconds_cicd" { - depends_on = [] + depends_on = [azurerm_synapse_firewall_rule.cicd, azurerm_synapse_firewall_rule.cicd_user] create_duration = "30s" } diff --git a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 index ac9999a2..fac47fcc 100644 --- a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 @@ -43,6 +43,9 @@ PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderP #Check to make sure that purview account is fully deployed $pname = ((az storage blob download -c "tstate" -n "terraform_layer2.tfstate" --account-name 
$env:TF_VAR_state_storage_account_name --auth-mode login) | ConvertFrom-Json).outputs.purview_name $pstate = (az purview account show --name $pname.value -g $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10).provisioningState + +#az purview account show --name $pname.value -g $env:TF_VAR_resource_group_name + if($pstate -ne "Succeeded") { Write-Error "Purview account has not yet completed provisioning - Wait For completion and then retry" diff --git a/solution/DeploymentV2/terraform_layer3/03-publish.ps1 b/solution/DeploymentV2/terraform_layer3/03-publish.ps1 index c0894384..d7a34204 100644 --- a/solution/DeploymentV2/terraform_layer3/03-publish.ps1 +++ b/solution/DeploymentV2/terraform_layer3/03-publish.ps1 @@ -46,4 +46,12 @@ PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderP $tout = GatherOutputsFromTerraform -TerraformFolderPath $PathToReturnTo ./database.ps1 -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo -PublishSQLLogins $true -./app_service.ps1 -aad_webreg_id $tout.aad_webreg_id \ No newline at end of file +./app_service.ps1 -aad_webreg_id $tout.aad_webreg_id + +#Flip Flag on deployment_layer3_complete +$envFolderPath = Convert-Path -Path ($deploymentFolderPath + "./environments/vars/$env:environmentName/") +$varsfile = $envFolderPath + "/common_vars_values.jsonc" +$common_vars_values = Get-Content $varsfile | ConvertFrom-Json -Depth 10 +$common_vars_values.FeatureTemplateOverrides.deployment_layer3_complete = $true +$common_vars_values | Convertto-Json -Depth 10 | Set-Content $varsfile + \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index a20c9e0b..b93ecd9e 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer2.tfstate" - resource_group_name = 
"gfth" - storage_account_name = "gfthstate" + resource_group_name = "gfh5" + storage_account_name = "gfh5state" } } diff --git a/solution/DeploymentV2/terraform_layer3/purview.tf b/solution/DeploymentV2/terraform_layer3/purview.tf index a230f44f..0de62a67 100644 --- a/solution/DeploymentV2/terraform_layer3/purview.tf +++ b/solution/DeploymentV2/terraform_layer3/purview.tf @@ -78,4 +78,8 @@ module "purview_ingestion_private_endpoints" { managed_resource_group_name = local.purview_resource_group_name name_suffix = random_id.rg_deployment_unique.id subscription_id = var.subscription_id + + depends_on = [ + azurerm_private_endpoint.purview_account_private_endpoint_with_dns,azurerm_private_endpoint.purview_portal_private_endpoint_with_dns + ] } From e8b5dd3f6bbda56fe804de61effa7101dc3b7717 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 13 Aug 2022 18:04:08 +0800 Subject: [PATCH 122/151] Added Terraform Output processing function --- .../featuretemplates/functional_tests.jsonc | 5 +- .../vars/PreprocessEnvironment.ps1 | 6 +- .../vars/common_vars_template.jsonnet | 122 ++++++++++++++++++ .../vars/staging/common_vars_values.jsonc | 3 +- .../pwshmodules/Deploy_10_SampleFiles.psm1 | 16 +-- .../pwshmodules/Deploy_4_PrivateLinks.psm1 | 20 +-- .../pwshmodules/Deploy_5_WebApp.psm1 | 2 +- .../pwshmodules/Deploy_6_FuncApp.psm1 | 6 +- .../pwshmodules/Deploy_7_MetadataDB.psm1 | 8 +- .../pwshmodules/Deploy_9_DataFactory.psm1 | 4 +- .../GatherOutputsFromTerraform.psm1 | 8 +- .../pwshmodules/ProcessTerraformApply.psm1 | 23 ++++ .../terraform_layer1/01-deploy.ps1 | 18 +-- .../terraform_layer2/02-deploy.ps1 | 28 ++-- .../DeploymentV2/terraform_layer2/database.tf | 2 +- .../terraform_layer2/virtual_machines.tf | 6 +- .../terraform_layer3/03-deploy.ps1 | 5 +- .../terraform_layer3/database.ps1 | 10 +- .../Services/AzureSynapseService.cs | 2 +- solution/Synapse/Patterns/uploadNotebooks.ps1 | 4 +- 20 files changed, 217 insertions(+), 81 deletions(-) create mode 100644 
solution/DeploymentV2/pwshmodules/ProcessTerraformApply.psm1 diff --git a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc index a11a262a..b520abaf 100644 --- a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc +++ b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc @@ -16,7 +16,8 @@ {"Name":"configure_networking","Value":true}, {"Name":"publish_datafactory_pipelines","Value":true}, {"Name":"publish_web_app_addcurrentuserasadmin","Value":true}, + {"Name":"publish_sif_database","Value":true}, {"Name":"deploy_selfhostedsql","Value":true}, - {"Name":"is_onprem_datafactory_ir_registered","Value":false}, - {"Name":"publish_sif_database","Value":true} + {"Name":"is_onprem_datafactory_ir_registered","Value":false}, + {"Name":"publish_sif_database","Value":true} ] \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 63346364..716fb799 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -44,7 +44,7 @@ $str = $str + "/*DONOTREMOVETHISCOMMENT:ENVS*/" ($cvjnss[0] + $Str + $cvjnss[2]) | Set-Content "./common_vars_template.jsonnet" -Write-Host "Preparing Environment: $Environment Using $FeatureTemplate Template" +Write-Debug "Preparing Environment: $Environment Using $FeatureTemplate Template" #Prep Output Folder $newfolder = "./../../bin/environments/$Environment/" @@ -86,11 +86,11 @@ foreach($fto in $fto_keys) { $ev_prop = ($envarprops | Where-Object {$_.Name -eq "TF_VAR_$Name"}).Name $Value = $fto_vals.$Name - Write-Warning "Overriding Feature Template value for $ev_prop with value of $Value" + Write-Debug "Overriding Feature Template value for $ev_prop with value of $Value" } else { $ev_prop = 
"TF_VAR_$Name" - Write-Warning "Inserting Feature Template value for $ev_prop with value of $Value" + Write-Debug "Inserting Feature Template value for $ev_prop with value of $Value" $Value = $fto_vals.$Name } diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 7bda3cc4..3221f023 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -504,6 +504,128 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 82f8ae23..f8bedb50 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -42,8 +42,7 @@ "deployment_layer3_complete": true, "synapse_git_toggle": false, "adf_git_toggle": false, - "deploy_sentinel": false, - "publish_sif_database": false, + "deploy_sentinel": false, "synapse_git_toggle_integration": false, "synapse_git_repository_owner": "h-sha", "synapse_git_repository_name": "testLockbox", diff --git a/solution/DeploymentV2/pwshmodules/Deploy_10_SampleFiles.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_10_SampleFiles.psm1 index b9e34ebe..f8bec577 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_10_SampleFiles.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_10_SampleFiles.psm1 @@ -22,19 +22,19 @@ function DeploySampleFiles ( Set-Location "../SampleFiles/" Write-Host "Deploying Sample files" if ($tout.is_vnet_isolated -eq 
$true) { - $result = az storage account update --resource-group $tout.resource_group_name --name $tout.adlsstorage_name --default-action Allow + $result = az storage account update --resource-group $tout.resource_group_name --name $tout.adlsstorage_name --default-action Allow --only-show-errors } - $result = az storage container create --name "datalakelanding" --account-name $tout.adlsstorage_name --auth-mode login - $result = az storage container create --name "datalakeraw" --account-name $tout.adlsstorage_name --auth-mode login - $result = az storage container create --name "datalakeraw" --account-name $tout.blobstorage_name --auth-mode login - $result = az storage container create --name "transientin" --account-name $tout.blobstorage_name --auth-mode login + $result = az storage container create --name "datalakelanding" --account-name $tout.adlsstorage_name --auth-mode login --only-show-errors + $result = az storage container create --name "datalakeraw" --account-name $tout.adlsstorage_name --auth-mode login --only-show-errors + $result = az storage container create --name "datalakeraw" --account-name $tout.blobstorage_name --auth-mode login --only-show-errors + $result = az storage container create --name "transientin" --account-name $tout.blobstorage_name --auth-mode login --only-show-errors - $result = az storage blob upload-batch --overwrite --destination "datalakeraw" --account-name $tout.adlsstorage_name --source ./ --destination-path samples/ --auth-mode login - $result = az storage blob upload-batch --overwrite --destination "datalakeraw" --account-name $tout.blobstorage_name --source ./ --destination-path samples/ --auth-mode login + $result = az storage blob upload-batch --overwrite --destination "datalakeraw" --account-name $tout.adlsstorage_name --source ./ --destination-path samples/ --auth-mode login --only-show-errors + $result = az storage blob upload-batch --overwrite --destination "datalakeraw" --account-name $tout.blobstorage_name --source ./ 
--destination-path samples/ --auth-mode login --only-show-errors if ($tout.is_vnet_isolated -eq $true) { - $result = az storage account update --resource-group $tout.resource_group_name --name $tout.adlsstorage_name --default-action Deny + $result = az storage account update --resource-group $tout.resource_group_name --name $tout.adlsstorage_name --default-action Deny --only-show-errors } Set-Location $deploymentFolderPath diff --git a/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 index 31afb080..3ea22047 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 @@ -12,46 +12,46 @@ function DeployPrivateLinks ( # Approve the Private Link Connections that get generated from the Managed Private Links in ADF #------------------------------------------------------------------------------------------------------------ Write-Host "Approving Private Link Connections" - $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.keyvault_name --type 'Microsoft.KeyVault/vaults' | ConvertFrom-Json + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.keyvault_name --type 'Microsoft.KeyVault/vaults' --only-show-errors | ConvertFrom-Json foreach ($link in $links) { if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { $id_parts = $link.id.Split("/"); Write-Host "- " + $id_parts[$id_parts.length - 1] - $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.keyvault_name --type Microsoft.Keyvault/vaults --description "Approved by Deploy.ps1" + $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.keyvault_name --type Microsoft.Keyvault/vaults 
--description "Approved by Deploy.ps1" --only-show-errors } } - $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.sqlserver_name --type 'Microsoft.Sql/servers' | ConvertFrom-Json + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.sqlserver_name --type 'Microsoft.Sql/servers' --only-show-errors | ConvertFrom-Json foreach ($link in $links) { if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { $id_parts = $link.id.Split("/"); Write-Host "- " + $id_parts[$id_parts.length - 1] - $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.sqlserver_name --type Microsoft.Sql/servers --description "Approved by Deploy.ps1" + $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.sqlserver_name --type Microsoft.Sql/servers --description "Approved by Deploy.ps1" --only-show-errors } } - $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.synapse_workspace_name --type 'Microsoft.Synapse/workspaces' | ConvertFrom-Json + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.synapse_workspace_name --type 'Microsoft.Synapse/workspaces' --only-show-errors | ConvertFrom-Json foreach ($link in $links) { if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { $id_parts = $link.id.Split("/"); Write-Host "- " + $id_parts[$id_parts.length - 1] - $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.synapse_workspace_name --type Microsoft.Synapse/workspaces --description "Approved by Deploy.ps1" + $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] 
--resource-name $tout.synapse_workspace_name --type Microsoft.Synapse/workspaces --description "Approved by Deploy.ps1" --only-show-errors } } - $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.blobstorage_name --type 'Microsoft.Storage/storageAccounts' | ConvertFrom-Json + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.blobstorage_name --type 'Microsoft.Storage/storageAccounts' --only-show-errors | ConvertFrom-Json foreach ($link in $links) { if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { $id_parts = $link.id.Split("/"); Write-Host "- " + $id_parts[$id_parts.length - 1] - $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.blobstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" + $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.blobstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" --only-show-errors } } - $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.adlsstorage_name --type 'Microsoft.Storage/storageAccounts' | ConvertFrom-Json + $links = az network private-endpoint-connection list -g $tout.resource_group_name -n $tout.adlsstorage_name --type 'Microsoft.Storage/storageAccounts' --only-show-errors | ConvertFrom-Json foreach ($link in $links) { if ($link.properties.privateLinkServiceConnectionState.status -eq "Pending") { $id_parts = $link.id.Split("/"); Write-Host "- " + $id_parts[$id_parts.length - 1] - $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.adlsstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" + 
$result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.adlsstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" --only-show-errors } } } diff --git a/solution/DeploymentV2/pwshmodules/Deploy_5_WebApp.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_5_WebApp.psm1 index 10e122e4..2d03660b 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_5_WebApp.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_5_WebApp.psm1 @@ -29,7 +29,7 @@ function DeployWebApp ( $Path = $Path + "/Publish.zip" Compress-Archive -Path '.\unzipped\webapplication\*' -DestinationPath $Path -force - $result = az webapp deployment source config-zip --resource-group $tout.resource_group_name --name $tout.webapp_name --src $Path + $result = az webapp deployment source config-zip --resource-group $tout.resource_group_name --name $tout.webapp_name --src $Path --only-show-errors Set-Location $deploymentFolderPath diff --git a/solution/DeploymentV2/pwshmodules/Deploy_6_FuncApp.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_6_FuncApp.psm1 index e9714e8f..e452afa4 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_6_FuncApp.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_6_FuncApp.psm1 @@ -28,11 +28,11 @@ function DeployFuncApp ( $Path = $Path + "/Publish.zip" Compress-Archive -Path '.\unzipped\functionapp\*' -DestinationPath $Path -force - $result = az functionapp deployment source config-zip --resource-group $tout.resource_group_name --name $tout.functionapp_name --src $Path + $result = az functionapp deployment source config-zip --resource-group $tout.resource_group_name --name $tout.functionapp_name --src $Path --only-show-errors #Make sure we are running V6.0 --TODO: Move this to terraform if possible -- This is now done! 
- $result = az functionapp config set --net-framework-version v6.0 -n $tout.functionapp_name -g $tout.resource_group_name - $result = az functionapp config appsettings set --name $tout.functionapp_name --resource-group $tout.resource_group_name --settings FUNCTIONS_EXTENSION_VERSION=~4 + $result = az functionapp config set --net-framework-version v6.0 -n $tout.functionapp_name -g $tout.resource_group_name --only-show-errors + $result = az functionapp config appsettings set --name $tout.functionapp_name --resource-group $tout.resource_group_name --settings FUNCTIONS_EXTENSION_VERSION=~4 --only-show-errors Set-Location $deploymentFolderPath diff --git a/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 index a871f9f8..5211476e 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_7_MetadataDB.psm1 @@ -25,19 +25,19 @@ function DeployMataDataDB ( dotnet publish --no-restore --configuration Release --output '..\..\..\DeploymentV2\bin\publish\unzipped\database\' #Add Ip to SQL Firewall - $result = az sql server update -n $tout.sqlserver_name -g $tout.resource_group_name --set publicNetworkAccess="Enabled" + $result = az sql server update -n $tout.sqlserver_name -g $tout.resource_group_name --set publicNetworkAccess="Enabled" --only-show-errors $myIp = $env:TF_VAR_ip_address $myIp2 = $env:TF_VAR_ip_address2 if ($myIp -ne $null) { - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp --only-show-errors } if ($myIp2 -ne $null) { - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDUser" --start-ip-address $myIp2 
--end-ip-address $myIp2 + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 --only-show-errors } #Allow Azure services and resources to access this server - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "Azure" --start-ip-address 0.0.0.0 --end-ip-address 0.0.0.0 + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "Azure" --start-ip-address 0.0.0.0 --end-ip-address 0.0.0.0 --only-show-errors Set-Location $deploymentFolderPath Set-Location ".\bin\publish\unzipped\database\" diff --git a/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 index b6333441..64e099fa 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_9_DataFactory.psm1 @@ -22,10 +22,10 @@ function DeployDataFactoryAndSynapseArtefacts ( $myIp2 = $env:TF_VAR_ip_address2 if ($myIp -ne $null) { - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp --only-show-errors } if ($myIp2 -ne $null) { - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 --only-show-errors } $SqlInstalled = Get-InstalledModule SqlServer diff --git a/solution/DeploymentV2/pwshmodules/GatherOutputsFromTerraform.psm1 
b/solution/DeploymentV2/pwshmodules/GatherOutputsFromTerraform.psm1 index 6063517d..3eab9b5e 100644 --- a/solution/DeploymentV2/pwshmodules/GatherOutputsFromTerraform.psm1 +++ b/solution/DeploymentV2/pwshmodules/GatherOutputsFromTerraform.psm1 @@ -13,8 +13,8 @@ function GatherOutputsFromTerraform($TerraformFolderPath) #------------------------------------------------------------------------------------------------------------ # Get all the outputs from terraform so we can use them in subsequent steps #------------------------------------------------------------------------------------------------------------ - Write-Host "-------------------------------------------------------------------------------------------------" - Write-Host "Reading Terraform Outputs - Started" + Write-Debug "-------------------------------------------------------------------------------------------------" + Write-Debug "Reading Terraform Outputs - Started" $tout = New-Object PSObject @@ -27,8 +27,8 @@ function GatherOutputsFromTerraform($TerraformFolderPath) $tout | Add-Member -MemberType NoteProperty -Name "resource_group_id" -Value $rgid #Set-Location $CurrentFolderPath - Write-Host "Reading Terraform Outputs - Finished" - Write-Host "-------------------------------------------------------------------------------------------------" + Write-Debug "Reading Terraform Outputs - Finished" + Write-Debug "-------------------------------------------------------------------------------------------------" Set-Location $currentPath return $tout } \ No newline at end of file diff --git a/solution/DeploymentV2/pwshmodules/ProcessTerraformApply.psm1 b/solution/DeploymentV2/pwshmodules/ProcessTerraformApply.psm1 new file mode 100644 index 00000000..0d6d9746 --- /dev/null +++ b/solution/DeploymentV2/pwshmodules/ProcessTerraformApply.psm1 @@ -0,0 +1,23 @@ +function ProcessTerraformApply ( + [Parameter(Mandatory = $true)] + [System.Boolean]$gitDeploy = $false, + [Parameter(Mandatory = $true)] + 
[System.Object]$output +) { + + $warnings = ($output | ConvertFrom-Json -Depth 20) | Where-Object {$_."@level" -eq "warn"} + $errors = ($output | ConvertFrom-Json -Depth 20) | Where-Object {$_."@level" -eq "error"} + if($warnings.count -gt 0) + { + Write-Host "---------------------Terraform Warnings-----------------------------------------------------------" + foreach($o in $warnings) {Write-Warning ($o."@message" + "; Address:" + $o.diagnostic.address + "; Detail:" + $o.diagnostic.detail)} + Write-Host "--------------------------------------------------------------------------------------------------" + } + if($errors.count -gt 0) + { + Write-Host "---------------------Terraform Errors-------------------------------------------------------------" + foreach($o in $errors) {Write-Error ($o."@message" + "; Address:" + $o.diagnostic.address + "; Detail:" + $o.diagnostic.detail)} + Write-Host "--------------------------------------------------------------------------------------------------" + } + +} diff --git a/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 b/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 index e919b749..0453206b 100644 --- a/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer1/01-deploy.ps1 @@ -27,6 +27,7 @@ param ( #------------------------------------------------------------------------------------------------------------ import-Module ./../pwshmodules/GatherOutputsFromTerraform.psm1 -force import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force +import-Module ./../pwshmodules/ProcessTerraformApply.psm1 -force #------------------------------------------------------------------------------------------------------------ # Preparation #Mandatory #------------------------------------------------------------------------------------------------------------ @@ -43,20 +44,9 @@ PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderP # Main Terraform - Layer1 
#------------------------------------------------------------------------------------------------------------ Write-Host "Starting Terraform Deployment- Layer 1" +Write-Host "Note this usually takes a few minutes to complete." $output = terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure $output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json -$warnings = ($output | ConvertFrom-Json -Depth 20) | Where-Object {$_."@level" -eq "warn"} -$errors = ($output | ConvertFrom-Json -Depth 20) | Where-Object {$_."@level" -eq "error"} -if($warnings.count -gt 0) -{ - Write-Host "---------------------Terraform Warnings-----------------------------------------------------------" - foreach($o in $warnings) {Write-Warning ($o."@message" + "; Address:" + $o.diagnostic.address + "; Detail:" + $o.diagnostic.detail)} - Write-Host "--------------------------------------------------------------------------------------------------" -} -if($errors.count -gt 0) -{ - Write-Host "---------------------Terraform Errors-------------------------------------------------------------" - foreach($o in $errors) {Write-Error ($o."@message" + "; Address:" + $o.diagnostic.address + "; Detail:" + $o.diagnostic.detail)} - Write-Host "--------------------------------------------------------------------------------------------------" -} \ No newline at end of file +ProcessTerraformApply -output $output -gitDeploy $gitDeploy + diff --git a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 index 8ca06641..d1f90e2e 100644 --- a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 @@ -27,6 +27,7 @@ param ( #------------------------------------------------------------------------------------------------------------ import-Module ./../pwshmodules/GatherOutputsFromTerraform.psm1 -force import-Module 
./../pwshmodules/Deploy_0_Prep.psm1 -force +import-Module ./../pwshmodules/ProcessTerraformApply.psm1 -force #------------------------------------------------------------------------------------------------------------ # Preparation #Mandatory #------------------------------------------------------------------------------------------------------------ @@ -43,23 +44,22 @@ PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderP # Main Terraform - Layer1 #------------------------------------------------------------------------------------------------------------ Write-Host "Starting Terraform Deployment- Layer 2" +Write-Host "Expect this to take 20-30 minutes to complete the first time it is run. Subsequent, incremental builds should only take a few minutes." +if([string]::IsNullOrEmpty($env:TF_VAR_jumphost_password) -and ($gitDeploy -eq $false -or $null -eq $gitdeploy)) +{ + $env:TF_VAR_jumphost_password = Read-Host "Enter the Jumphost Password" +} + +if([string]::IsNullOrEmpty($env:TF_VAR_synapse_sql_password) -and ($gitDeploy -eq $false -or $null -eq $gitdeploy)) +{ + $env:TF_VAR_synapse_sql_password = Read-Host "Enter the Synapse SQL Admin Password" +} + $output = terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure $output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json #-var synapse_sql_password=$env:TF_VAR_synapse_sql_password -$warnings = ($output | ConvertFrom-Json -Depth 20) | Where-Object {$_."@level" -eq "warn"} -$errors = ($output | ConvertFrom-Json -Depth 20) | Where-Object {$_."@level" -eq "error"} -if($warnings.count -gt 0) -{ - Write-Host "---------------------Terraform Warnings-----------------------------------------------------------" - foreach($o in $warnings) {Write-Warning ($o."@message" + "; Address:" + $o.diagnostic.address + "; Detail:" + $o.diagnostic.detail)} - Write-Host 
"--------------------------------------------------------------------------------------------------" -} -if($errors.count -gt 0) -{ - Write-Host "---------------------Terraform Errors-------------------------------------------------------------" - foreach($o in $errors) {Write-Error ($o."@message" + "; Address:" + $o.diagnostic.address + "; Detail:" + $o.diagnostic.detail)} - Write-Host "--------------------------------------------------------------------------------------------------" -} +ProcessTerraformApply -output $output -gitDeploy $gitDeploy + #Update Values for variables in Environment $tout_raw = ((az storage blob download -c "tstate" -n "terraform_layer2.tfstate" --account-name $env:TF_VAR_state_storage_account_name --auth-mode login) | ConvertFrom-Json).outputs diff --git a/solution/DeploymentV2/terraform_layer2/database.tf b/solution/DeploymentV2/terraform_layer2/database.tf index 4c38c464..28a97e04 100644 --- a/solution/DeploymentV2/terraform_layer2/database.tf +++ b/solution/DeploymentV2/terraform_layer2/database.tf @@ -8,7 +8,7 @@ resource "random_password" "database" { min_special = 1 special = true lower = true - number = true + numeric = true upper = true } diff --git a/solution/DeploymentV2/terraform_layer2/virtual_machines.tf b/solution/DeploymentV2/terraform_layer2/virtual_machines.tf index 46b4dc8c..afdd2056 100644 --- a/solution/DeploymentV2/terraform_layer2/virtual_machines.tf +++ b/solution/DeploymentV2/terraform_layer2/virtual_machines.tf @@ -58,7 +58,7 @@ resource "random_password" "selfhostedsql" { min_special = 1 special = true lower = true - number = true + numeric = true upper = true } @@ -129,7 +129,7 @@ resource "random_password" "h2o-ai" { min_special = 1 special = true lower = true - number = true + numeric = true upper = true } @@ -204,7 +204,7 @@ resource "random_password" "custom_vm" { min_special = 1 special = true lower = true - number = true + numeric = true upper = true } diff --git 
a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 index fac47fcc..f71fb35c 100644 --- a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 @@ -55,6 +55,7 @@ else { # Main Terraform - Layer1 #------------------------------------------------------------------------------------------------------------ Write-Host "Starting Terraform Deployment- Layer 3" - terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure - terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl + $output = terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure + $output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json + ProcessTerraformApply -output $output -gitDeploy $gitDeploy } diff --git a/solution/DeploymentV2/terraform_layer3/database.ps1 b/solution/DeploymentV2/terraform_layer3/database.ps1 index 26574801..f7e8ba50 100644 --- a/solution/DeploymentV2/terraform_layer3/database.ps1 +++ b/solution/DeploymentV2/terraform_layer3/database.ps1 @@ -30,10 +30,10 @@ else { $myIp2 = $env:TF_VAR_ip_address2 if ($myIp -ne $null) { - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp --only-show-errors } if ($myIp2 -ne $null) { - $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + $result = az sql server firewall-rule create -g $tout.resource_group_name -s $tout.sqlserver_name -n "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 
--only-show-errors } $databases = @($tout.stagingdb_name, $tout.sampledb_name, $tout.metadatadb_name) @@ -137,12 +137,12 @@ else { #Add Ip to SQL Firewall #$result = az synapse workspace update -n $synapse_workspace_name -g $resource_group_name --set publicNetworkAccess="Enabled" - $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp - $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 + $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "CICDAgent" --start-ip-address $myIp --end-ip-address $myIp --only-show-errors + $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "CICDUser" --start-ip-address $myIp2 --end-ip-address $myIp2 --only-show-errors if ($tout.is_vnet_isolated -eq $false) { - $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "AllowAllWindowsAzureIps" --start-ip-address "0.0.0.0" --end-ip-address "0.0.0.0" + $result = az synapse workspace firewall-rule create --resource-group $tout.resource_group_name --workspace-name $tout.synapse_workspace_name --name "AllowAllWindowsAzureIps" --start-ip-address "0.0.0.0" --end-ip-address "0.0.0.0" --only-show-errors } if([string]::IsNullOrEmpty($tout.synapse_sql_pool_name) ) diff --git a/solution/FunctionApp/FunctionApp/Services/AzureSynapseService.cs b/solution/FunctionApp/FunctionApp/Services/AzureSynapseService.cs index 85aceea4..3ba09198 100644 --- a/solution/FunctionApp/FunctionApp/Services/AzureSynapseService.cs +++ 
b/solution/FunctionApp/FunctionApp/Services/AzureSynapseService.cs @@ -156,7 +156,7 @@ namespace FunctionApp.Services } logging.LogWarning($"Task Named {taskName} Failed To Start. Result status was '{sneh.Sner.StatementResult}' Attempt Number {tryCount.ToString()}"); tryCount++; - await Task.Delay(45000); + await Task.Delay(20000); } if (success) { diff --git a/solution/Synapse/Patterns/uploadNotebooks.ps1 b/solution/Synapse/Patterns/uploadNotebooks.ps1 index 23c3aa5a..c35ad36e 100644 --- a/solution/Synapse/Patterns/uploadNotebooks.ps1 +++ b/solution/Synapse/Patterns/uploadNotebooks.ps1 @@ -10,7 +10,7 @@ $tests = (Get-ChildItem -Path ("../../Synapse/Patterns/notebook") -Verbose -Filt foreach ($test in $tests) { ($test | Get-Content) | Set-Content('FileForUpload.json') - $result = az synapse notebook import --workspace-name $tout.synapse_workspace_name --name $test.BaseName --file '@FileForUpload.json' --folder-path 'FrameworkNotebooks' + $result = az synapse notebook import --workspace-name $tout.synapse_workspace_name --name $test.BaseName --file '@FileForUpload.json' --folder-path 'FrameworkNotebooks' --only-show-errors Remove-Item FileForUpload.json } @@ -22,7 +22,7 @@ if ($tout.publish_sif_database) foreach ($test in $tests) { ($test | Get-Content) | Set-Content('FileForUpload.json') - $result = az synapse notebook import --workspace-name $tout.synapse_workspace_name --name $test.BaseName --file '@FileForUpload.json' --folder-path 'FrameworkNotebooks/sif' + $result = az synapse notebook import --workspace-name $tout.synapse_workspace_name --name $test.BaseName --file '@FileForUpload.json' --folder-path 'FrameworkNotebooks/sif' --only-show-errors Remove-Item FileForUpload.json } } From 0a22695ff09c9e444d95e6dc699e37671b08af3f Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 15 Aug 2022 10:25:31 +0800 Subject: [PATCH 123/151] Update README.md --- solution/DeploymentV2/README.md | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff 
--git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index c8e7d3c6..fb9b3298 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -26,14 +26,21 @@ The purpose of this script is to prepare your Azure environment ready for the de ### :arrow_forward: How do I run this? Execute the following script file: ```./Prepare.ps1``` -When you execute the script it will ask you for two inputs: - - **Resource Group Name**: The resource group name to be created. If you skip this, only the providers will be registered - - **Storage Account Name** The storage account name for storing your terraform state. If you skip this, no storage account will be created +When you execute the script it will ask you for a number of inputs: +- **Deployment Environment**: The first step is to select which deployment environment you would like to deploy into. The specifics of each deployment environment are stored within json files located within the [./environments/vars/](./environments/vars/) directory. Within this directory there is a subdirectory for each environment file. The prepare script will gather information and update one of these envionment files. The specific file that will be update depends on which environment you choose at this point. +![image](https://user-images.githubusercontent.com/11702150/184566506-95b8d705-de58-4c2c-a2e2-5b8dfd855f7b.png) +- **Resource Group Name**: The resource group name to be created. +![image](https://user-images.githubusercontent.com/11702150/184566884-89671236-cbb6-441d-a6b5-f7390a44b78c.png) +- **Resource Provider Registration**: Select '1' (YES) to ensure that the pre-requisite resource providers have been enabled on your Azure subscription. +![image](https://user-images.githubusercontent.com/11702150/184566915-ad311bf1-59fc-4c1d-a94c-6d51c3b82101.png) +- **Resource Owner**: Select 'yes' to ensure that the pre-requisite reesource providers have been enabled on your Azure subscription. 
+- **SQL Server AAD Admin**: Select 'yes' to ensure that the pre-requisite reesource providers have been enabled on your Azure subscription. + At the end of the execution, you will be provided the details of what was performed as well as the resource & subscription details. These are pre-loaded into environment variables so that you can directly run the ./Deploy.ps1 without doing any manual entry. -To save you having to do more work later, I recommend that you copy them down and updatethe values directly into the following file: +To save you having to do more work later, I recommend that you copy them down and update the values directly into the following file: ```/azure-data-services-go-fast-codebase/solution/DeploymentV2/terraform/vars/local/terragrunt.hcl``` @@ -45,7 +52,7 @@ Before you run the **Deploy.ps1** script, make sure you have completed the pre-r - Run the Prepare.ps1 script first. This will prepare your azure subscription for deployment - Ensure that you have run az login and az account set - Ensure you have Owner access to the resource group you are planning on deploying into -- Ensure you have the Application Administrator role with Azure AD to allow you to create AAD app registrations +- Ensure you have the Application.ReadWrite.OwnedBy permission with Azure AD to allow you to create and manage AAD app registrations ### :grey_question: What does it do? 
This script will: From a92b327ca58b6fb26559d65f98f42a055552f9a2 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 15 Aug 2022 10:34:13 +0800 Subject: [PATCH 124/151] Update README.md --- solution/DeploymentV2/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index fb9b3298..57c8d0aa 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -33,8 +33,8 @@ When you execute the script it will ask you for a number of inputs: ![image](https://user-images.githubusercontent.com/11702150/184566884-89671236-cbb6-441d-a6b5-f7390a44b78c.png) - **Resource Provider Registration**: Select '1' (YES) to ensure that the pre-requisite resource providers have been enabled on your Azure subscription. ![image](https://user-images.githubusercontent.com/11702150/184566915-ad311bf1-59fc-4c1d-a94c-6d51c3b82101.png) -- **Resource Owner**: Select 'yes' to ensure that the pre-requisite reesource providers have been enabled on your Azure subscription. -- **SQL Server AAD Admin**: Select 'yes' to ensure that the pre-requisite reesource providers have been enabled on your Azure subscription. +- **Resource Owner**: Insert the object id of the Azure identity or group that you would like to have ownership of the resource group. If you are planning to deploy the solution using a CICD agent, it is suggested that you enter the Agent Service Principal's object id here. If you will be deploying from the command line using an interactive session then leave this field blank. +- **SQL Server AAD Admin**: Insert the object id of the Azure identity or group that you would like to be the AAD administrator of any SQL Server instances deployed. If you are planning to deploy the solution using a CICD agent, then it is suggested that you use an AAD group here. If you will be deploying from the command line using an interactive session then leave this field blank. 
At the end of the execution, you will be provided the details of what was performed as well as the resource & subscription details. @@ -52,7 +52,7 @@ Before you run the **Deploy.ps1** script, make sure you have completed the pre-r - Run the Prepare.ps1 script first. This will prepare your azure subscription for deployment - Ensure that you have run az login and az account set - Ensure you have Owner access to the resource group you are planning on deploying into -- Ensure you have the Application.ReadWrite.OwnedBy permission with Azure AD to allow you to create and manage AAD app registrations +- Ensure you have the **Application.ReadWrite.OwnedBy** permission with Azure AD to allow you to create and manage AAD app registrations ### :grey_question: What does it do? This script will: From 9b577018785a92ef8c7900bb7d21a3bd0629d369 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 15 Aug 2022 10:55:07 +0800 Subject: [PATCH 125/151] Update README.md --- solution/DeploymentV2/README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index 57c8d0aa..e7f36140 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -71,3 +71,9 @@ The configuration for this environment creation is read from the following locat - The environment variables created when you ran Prepare.ps1 - The environment configuration file: - ```/azure-data-services-go-fast-codebase/solution/DeploymentV2/terraform/vars/local/terragrunt.hcl``` + +Layer | Description | Permissions Required when using Service Principal | Permissions Required when using User Principal +| --- | --- | --- | --- | +Terraform Layer One | - Register AAD Enterprise Applications & Service Principals | - Application.ReadWrite.OwnedBy | - Application Administrator (Role) +Terraform Layer Two | - Core IAC deployment for approx. 
70 ADS Go fast resources | - Resource Group Owner | - Resource Group Owner +Terraform Layer Three | - Update AAD Enterprise Applications by granting required roles and permissions to managed service identities created in Layer Two
- Create Private Endpoints for Purview | - Application.ReadWrite.OwnedBy
(Must be same identity as that which was used to run Layer One) | - Application Administrator (Role),
- Network Contributor From 50b58849dfd191ca83038e168199f520174c27fb Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 15 Aug 2022 13:51:35 +0800 Subject: [PATCH 126/151] Update README.md --- solution/DeploymentV2/README.md | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index e7f36140..0d157d33 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -35,16 +35,16 @@ When you execute the script it will ask you for a number of inputs: ![image](https://user-images.githubusercontent.com/11702150/184566915-ad311bf1-59fc-4c1d-a94c-6d51c3b82101.png) - **Resource Owner**: Insert the object id of the Azure identity or group that you would like to have ownership of the resource group. If you are planning to deploy the solution using a CICD agent, it is suggested that you enter the Agent Service Principal's object id here. If you will be deploying from the command line using an interactive session then leave this field blank. - **SQL Server AAD Admin**: Insert the object id of the Azure identity or group that you would like to be the AAD administrator of any SQL Server instances deployed. If you are planning to deploy the solution using a CICD agent, then it is suggested that you use an AAD group here. If you will be deploying from the command line using an interactive session then leave this field blank. +- **Press any key to continue**: The script will now evaluate your system to gather required information. A summary of that information will be presented to you (similar to the screen capture below). Review the information and press any key to continue. +![image](https://user-images.githubusercontent.com/11702150/184581848-da28499a-2349-4327-a06b-441353b0de93.png) +- **Automatic Environment File Update**: You will now be asked if you wish to automatically persist the configuration information into the selected environment file. 
It is recommended that you select 'yes' and allow the script to automatically update the required file. +![image](https://user-images.githubusercontent.com/11702150/184582043-8490f92e-fe1b-49d1-b548-5061d957a6e2.png) +- **Reset Flags**: During a deployment there may be some manual steps required such as installation of a self hosted integration runtime. In order for the deployment to be aware of the state of these manual steps a number of flags are included in the configuration files. If you select 'yes' at this step the script will reset these flags to their default state. For a new deployment this is recommended. Once you have completed the associated manual steps you can then update the relevant configuration file accordingly. +![image](https://user-images.githubusercontent.com/11702150/184582065-535151c9-ee64-43a8-88c7-b9dbc30bbec1.png) +- **Fast Start Template**: Fast start templates provide a pre-selected combination of features for a deployment.
For most deployments it is recommended to select the "full_deployment' option. +![image](https://user-images.githubusercontent.com/11702150/184582079-43da1f0c-8a05-4ebd-b842-40a7e8e3af35.png) - -At the end of the execution, you will be provided the details of what was performed as well as the resource & subscription details. -These are pre-loaded into environment variables so that you can directly run the ./Deploy.ps1 without doing any manual entry. - -To save you having to do more work later, I recommend that you copy them down and update the values directly into the following file: - - ```/azure-data-services-go-fast-codebase/solution/DeploymentV2/terraform/vars/local/terragrunt.hcl``` - - This file is used by the ./Deploy.ps1 script by default and will be used if no enviroment vars are available +At the end of the scripts execution the environment details are pre-loaded into environment variables so that you can directly run the ./Deploy.ps1 without doing any manual entry. ## :green_circle: PART 2. Deploy your Lockbox using Deploy.ps1 script ### :page_with_curl: Pre-requisites @@ -56,7 +56,7 @@ Before you run the **Deploy.ps1** script, make sure you have completed the pre-r ### :grey_question: What does it do? This script will: - - Deploy all infra resources using terra + - Deploy all infra resources using terraform - Approve all private link requests - Build and deploy web app - Build and deploy function app From a7572b3d62f21330a6c3416b4a7ad247be7b095f Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 15 Aug 2022 13:55:29 +0800 Subject: [PATCH 127/151] Update README.md --- solution/DeploymentV2/README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index 0d157d33..e912b806 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -69,9 +69,11 @@ Execute the following script file: You can run this script multiple times if needed. 
The configuration for this environment creation is read from the following locations: - The environment variables created when you ran Prepare.ps1 -- The environment configuration file: - - ```/azure-data-services-go-fast-codebase/solution/DeploymentV2/terraform/vars/local/terragrunt.hcl``` +- The environment configuration file (*where {selected_environment} is the name of the environment that you selected during execution of prepare.ps1): + - ```/azure-data-services-go-fast-codebase/solution/DeploymentV2/environment/vars/{selected_environment}/terragrunt.hcl``` + +### Deployment Layers Layer | Description | Permissions Required when using Service Principal | Permissions Required when using User Principal | --- | --- | --- | --- | Terraform Layer One | - Register AAD Enterprise Applications & Service Principals | - Application.ReadWrite.OwnedBy | - Application Administrator (Role) From 08d31c2c1653ff94d2fe36cb0abb472f3d33458d Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 15 Aug 2022 14:05:30 +0800 Subject: [PATCH 128/151] Minor change to prepare --- solution/DeploymentV2/Prepare.ps1 | 2 +- .../environments/vars/common_vars_template.jsonnet | 2 ++ .../environments/vars/staging/common_vars_values.jsonc | 4 ++-- solution/FunctionApp/FunctionApp/FunctionApp.csproj | 9 ++------- 4 files changed, 7 insertions(+), 10 deletions(-) diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index e0f0292d..f2b71c44 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -205,7 +205,7 @@ else Write-Host " - domain = " -NoNewline -ForegroundColor green Write-Host "${env:TF_VAR_domain}"; Write-Host " "; - Write-Host "NOTE: It is recommended you copy these into your environment/vars/local/common_vars_values.jsonc file for future use" -ForegroundColor blue + Write-Host "NOTE: If you did not select the option to autopersist configurations, then it is recommended you copy these into your 
environment/vars/local/common_vars_values.jsonc file for future use" -ForegroundColor blue Write-Host "Press any key to continue..."; #------------------------------------------------------------------------------------------------------------ # Pause incase this was run directly diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 3221f023..a213c001 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -643,6 +643,8 @@ local SecretFileSensitiveVars = { // Object comprehension. + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index f8bedb50..d32fb1e0 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -39,10 +39,10 @@ "GIT_ADF_EMAIL_ADDRESS": "#####", "FeatureTemplateOverrides": { "is_onprem_datafactory_ir_registered": false, - "deployment_layer3_complete": true, + "deployment_layer3_complete": false, "synapse_git_toggle": false, "adf_git_toggle": false, - "deploy_sentinel": false, + "deploy_sentinel": false, "synapse_git_toggle_integration": false, "synapse_git_repository_owner": "h-sha", "synapse_git_repository_name": "testLockbox", diff --git a/solution/FunctionApp/FunctionApp/FunctionApp.csproj b/solution/FunctionApp/FunctionApp/FunctionApp.csproj index 5de65e0d..7e76e574 100644 --- a/solution/FunctionApp/FunctionApp/FunctionApp.csproj +++ b/solution/FunctionApp/FunctionApp/FunctionApp.csproj @@ -53,13 +53,8 @@ - - - - all - runtime; build; native; contentfiles; analyzers; buildtransitive - - + + all runtime; build; native; contentfiles; analyzers; buildtransitive From b74c058e5c6788977db72b7d2c0eadaeabab1b7b Mon Sep 17 
00:00:00 2001 From: h-sha <97069267+h-sha@users.noreply.github.com> Date: Tue, 16 Aug 2022 09:48:48 +1000 Subject: [PATCH 129/151] merge --- .../DeploymentV2/environments/vars/common_vars_template.jsonnet | 2 ++ 1 file changed, 2 insertions(+) diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index c44589ed..a38716ac 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -811,6 +811,8 @@ local SecretFileSensitiveVars = { // Object comprehension. + + From 54a2725c0d8d66e3c369505ee1727303d56ed085 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 16 Aug 2022 09:25:02 +0800 Subject: [PATCH 130/151] Updated Cleanup --- .../AdsGoFastDbUp/AdsGoFastDbUp.csproj | 2 +- solution/DeploymentV2/Cleanup_RemoveAll.ps1 | 13 ++++--- .../vars/common_vars_template.jsonnet | 4 +++ .../pwshmodules/Deploy_4_PrivateLinks.psm1 | 4 +++ .../terraform_layer3/03-deploy.ps1 | 34 +++++++++++-------- 5 files changed, 36 insertions(+), 21 deletions(-) diff --git a/solution/Database/ADSGoFastDbUp/AdsGoFastDbUp/AdsGoFastDbUp.csproj b/solution/Database/ADSGoFastDbUp/AdsGoFastDbUp/AdsGoFastDbUp.csproj index cd36ea9a..600c813f 100644 --- a/solution/Database/ADSGoFastDbUp/AdsGoFastDbUp/AdsGoFastDbUp.csproj +++ b/solution/Database/ADSGoFastDbUp/AdsGoFastDbUp/AdsGoFastDbUp.csproj @@ -2,7 +2,7 @@ Exe - netcoreapp3.1 + net6.0 diff --git a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 index ab4ace1a..16887867 100644 --- a/solution/DeploymentV2/Cleanup_RemoveAll.ps1 +++ b/solution/DeploymentV2/Cleanup_RemoveAll.ps1 @@ -6,12 +6,15 @@ Import-Module .\pwshmodules\GetSelectionFromUser.psm1 -force Import-Module .\pwshmodules\GatherOutputsFromTerraform.psm1 -force -$environmentName = Get-SelectionFromUser -Options ('local','staging') -Prompt "Select deployment 
environment" -if ($environmentName -eq "Quit") -{ - Exit +if ($null -eq [System.Environment]::GetEnvironmentVariable('environmentName')) { + $envlist = (Get-ChildItem -Directory -Path ./environments/vars | Select-Object -Property Name).Name + Import-Module ./pwshmodules/GetSelectionFromUser.psm1 -Force + $environmentName = Get-SelectionFromUser -Options ($envlist) -Prompt "Select deployment environment" + [System.Environment]::SetEnvironmentVariable('environmentName', $environmentName) + [System.Environment]::SetEnvironmentVariable('TFenvironmentName',$environmentName) } -[System.Environment]::SetEnvironmentVariable('TFenvironmentName',$environmentName) + + #------------------------------------------------------------------------------------------------------------ # Get all the outputs from terraform so we can use them in subsequent steps diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index a213c001..ffc0aa06 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -643,6 +643,10 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + diff --git a/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 index 3ea22047..7d584aa4 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_4_PrivateLinks.psm1 @@ -54,5 +54,9 @@ function DeployPrivateLinks ( $result = az network private-endpoint-connection approve -g $tout.resource_group_name -n $id_parts[$id_parts.length - 1] --resource-name $tout.adlsstorage_name --type Microsoft.Storage/storageAccounts --description "Approved by Deploy.ps1" --only-show-errors } } + + + #$links = (az network private-dns zone list --resource-group gfh5 | ConvertFrom-Json).name + #foreach($l in $links) {az network private-dns link vnet create --name "adscore.$l" --registration-enabled false --resource-group gfuat --virtual-network "/subscriptions/035a1364-f00d-48e2-b582-4fe125905ee3/resourceGroups/adsgfcore/providers/Microsoft.Network/virtualNetworks/ads-gf-core-vnet" --zone-name $l } } } \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 index f71fb35c..d6c3029b 100644 --- a/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer3/03-deploy.ps1 @@ -40,22 +40,26 @@ $ipaddress2 = $env:TF_VAR_ip_address2 PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo +#if($env:TF_VAR_deploy_purview) +#{ #Check to make sure that purview account is fully deployed -$pname = ((az storage blob download -c "tstate" -n "terraform_layer2.tfstate" --account-name $env:TF_VAR_state_storage_account_name --auth-mode login) | ConvertFrom-Json).outputs.purview_name -$pstate = (az purview account show --name $pname.value -g $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10).provisioningState +#$pname = ((az storage blob 
download -c "tstate" -n "terraform_layer2.tfstate" --account-name $env:TF_VAR_state_storage_account_name --auth-mode login) | ConvertFrom-Json).outputs.purview_name +#$pstate = (az purview account show --name $pname.value -g $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10).provisioningState #az purview account show --name $pname.value -g $env:TF_VAR_resource_group_name -if($pstate -ne "Succeeded") -{ - Write-Error "Purview account has not yet completed provisioning - Wait For completion and then retry" -} -else { - #------------------------------------------------------------------------------------------------------------ - # Main Terraform - Layer1 - #------------------------------------------------------------------------------------------------------------ - Write-Host "Starting Terraform Deployment- Layer 3" - $output = terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure - $output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json - ProcessTerraformApply -output $output -gitDeploy $gitDeploy -} +# if($pstate -ne "Succeeded") +# { +# Write-Error "Purview account has not yet completed provisioning - Wait For completion and then retry" +# exit +# } +#} + +#------------------------------------------------------------------------------------------------------------ +# Main Terraform - Layer1 +#------------------------------------------------------------------------------------------------------------ +Write-Host "Starting Terraform Deployment- Layer 3" +$output = terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure +$output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json +ProcessTerraformApply -output $output -gitDeploy $gitDeploy + From 0d95cba9e9d0cf9d7f362722bebda66414cb275e Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 16 Aug 2022 13:00:47 +0800 Subject: [PATCH 
131/151] Updating Prepare to Allow for Fully Private Networking --- .../Patterns/FuncAppTests_Generate.ps1 | 2 +- .../Patterns/Jsonnet_GenerateADFArtefacts.ps1 | 6 +- solution/DataFactory/Patterns/test.ps1 | 2 +- solution/DeploymentV2/Prepare.ps1 | 57 ++++++++++++++----- .../featuretemplates/functional_tests.jsonc | 3 +- .../vars/PreprocessEnvironment.ps1 | 2 +- .../vars/common_vars_template.jsonnet | 8 +++ .../vars/staging/common_vars_values.jsonc | 4 +- .../Patterns/FuncAppTests_Generate.ps1 | 2 +- .../Patterns/Jsonnet_GenerateADFArtefacts.ps1 | 6 +- 10 files changed, 65 insertions(+), 27 deletions(-) diff --git a/solution/DataFactory/Patterns/FuncAppTests_Generate.ps1 b/solution/DataFactory/Patterns/FuncAppTests_Generate.ps1 index 146370de..30835fd1 100644 --- a/solution/DataFactory/Patterns/FuncAppTests_Generate.ps1 +++ b/solution/DataFactory/Patterns/FuncAppTests_Generate.ps1 @@ -14,7 +14,7 @@ foreach ($pattern in $patterns) { if (!(Test-Path "./tests")) { - New-Item -itemType Directory -Name "tests" + New-Item -itemType Directory -Name -Force "tests" } else { diff --git a/solution/DataFactory/Patterns/Jsonnet_GenerateADFArtefacts.ps1 b/solution/DataFactory/Patterns/Jsonnet_GenerateADFArtefacts.ps1 index d48b1738..de50d2a7 100644 --- a/solution/DataFactory/Patterns/Jsonnet_GenerateADFArtefacts.ps1 +++ b/solution/DataFactory/Patterns/Jsonnet_GenerateADFArtefacts.ps1 @@ -190,11 +190,11 @@ foreach ($patternFolder in $patternFolders) Write-Verbose "_____________________________" $newfolder = ($folder + "/output") - $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Name $newfolder) : ($F = "") + $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Force -Name $newfolder) : ($F = "") $newfolder = ($newfolder + "/schemas") - $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Name $newfolder) : ($F = "") + $hiddenoutput = !(Test-Path $newfolder) ? 
($F = New-Item -itemType Directory -Force -Name $newfolder) : ($F = "") $newfolder = ($newfolder + "/taskmasterjson/") - $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Name $newfolder) : ($F = "") + $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Force -Name $newfolder) : ($F = "") $schemafile = (Get-ChildItem -Path ($folder+"/jsonschema/") -Filter "Main.libsonnet") #foreach ($schemafile in $schemafiles) diff --git a/solution/DataFactory/Patterns/test.ps1 b/solution/DataFactory/Patterns/test.ps1 index 142cc8d7..766e8a11 100644 --- a/solution/DataFactory/Patterns/test.ps1 +++ b/solution/DataFactory/Patterns/test.ps1 @@ -4,7 +4,7 @@ $newfolder = "./output/" if (!(Test-Path "./output")) { - New-Item -itemType Directory -Name "output" + New-Item -itemType Directory -Name -Force "output" } else { diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index 6b241591..16491905 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -22,6 +22,17 @@ # Once this script has finished, you then run Deploy.ps1 to create your environment # ------------------------------------------------------------------------------------------------------------ +Function Sleep-Progress($seconds) { + $s = 0; + Do { + $p = [math]::Round(100 - (($seconds - $s) / $seconds * 100)); + Write-Progress -Activity "Waiting..." -Status "$p% Complete:" -SecondsRemaining ($seconds - $s) -PercentComplete $p; + [System.Threading.Thread]::Sleep(1000) + $s++; + } + While($s -lt $seconds); + +} #by default $gitDeploy will not be true, only being set by the git environment - meaning if not using a runner it will default to a standard execution. $gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') @@ -151,25 +162,45 @@ else # and restrict it so that only GitHub / AzDO can access it. 
#------------------------------------------------------------------------------------------------------------ if([string]::IsNullOrEmpty($env:TF_VAR_resource_group_name) -eq $false) { - $progress = 0 - Write-Progress -Activity "Creating Resource Group" -Status "${progress}% Complete:" -PercentComplete $progress $rg = az group create -n $env:TF_VAR_resource_group_name -l australiaeast --only-show-errors if([string]::IsNullOrEmpty($env:TF_VAR_state_storage_account_name) -eq $false) { - $progress+=5 - Write-Progress -Activity "Creating Storage Account" -Status "${progress}% Complete:" -PercentComplete $progress - $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --allow-blob-public-access false --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors + Write-Host "Creating storage account" + #Public + $uinput = Get-SelectionFromUser -Options ('Public','Isolated', 'Private') -Prompt "Please select Network Isolation Level" + if($uinput -eq "Public") + { + Write-Host "Creating Public Storage" + $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --allow-blob-public-access false --public-network-access Enabled --default-action Allow --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors + } - $progress+=5 - $userObjectId = az ad signed-in-user show --query id -o tsv --only-show-errors - Write-Progress -Activity "Assigning Blob Contributor" -Status "${progress}% Complete:" -PercentComplete $progress + if($uinput -eq "Isolated") + { + Write-Host "Creating Isolated Storage" + #Isolated + $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --allow-blob-public-access false --public-network-access Enabled --default-action Deny 
--https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors + $hiddenoutput =az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address --only-show-errors + #wait for network rule + Sleep-Progress 7 + } + if($uinput -eq "Private") + { + Write-Host "Creating Private Storage" + #Private + $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --allow-blob-public-access false --public-network-access Disabled --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors + + $DeploymentVnet = Read-Host "Please input the name of the vnet from which deployment activities will originate." + $DeploymentRg = Read-Host "Please input the resource group of the vnet from which deployment activities will originate." + $output = az network private-dns zone create --resource-group $env:TF_VAR_resource_group_name --name "privatelink.blob.core.windows.net" + $output = az network private-endpoint create -g $DeploymentRg -n $env:TF_VAR_state_storage_account_name --vnet-name $DeploymentVnet --subnet default --private-connection-resource-id "/subscriptions/$env:TF_VAR_subscription_id/resourceGroups/$env:TF_VAR_resource_group_name/providers/Microsoft.Storage/storageAccounts/gft6state" --connection-name tttt -l australiaeast --group-id dfs + $output = az network private-dns link vnet create --name "adscore.privatelink.blob.core.windows.net" --registration-enabled false --resource-group $env:TF_VAR_resource_group_name --virtual-network "/subscriptions/$env:TF_VAR_subscription_id/resourceGroups/$DeploymentRg/providers/Microsoft.Network/virtualNetworks/$DeploymentVnet" --zone-name "privatelink.blob.core.windows.net" + } + Write-Host "Creating Role Assignment" + $userObjectId = az ad signed-in-user show --query id -o tsv --only-show-errors 
$assignment = az role assignment create --role "Storage Blob Data Contributor" --assignee-object-id $userObjectId --assignee-principal-type User --only-show-errors - - $progress+=5 - Write-Progress -Activity "Creating State Container" -Status "${progress}% Complete:" -PercentComplete $progress + Write-Host "Creating State Container" $container = az storage container create --name $CONTAINER_NAME --account-name $env:TF_VAR_state_storage_account_name --auth-mode login --only-show-errors - - Write-Progress -Activity "Finished" -Completed + } } diff --git a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc index a14be3e5..7b0ad085 100644 --- a/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc +++ b/solution/DeploymentV2/environments/featuretemplates/functional_tests.jsonc @@ -18,7 +18,6 @@ {"Name":"publish_web_app_addcurrentuserasadmin","Value":true}, {"Name":"publish_sif_database","Value":true}, {"Name":"deploy_selfhostedsql","Value":true}, - {"Name":"is_onprem_datafactory_ir_registered","Value":false}, - {"Name":"publish_sif_database","Value":true}, + {"Name":"is_onprem_datafactory_ir_registered","Value":false}, {"Name":"deployment_layer3_complete","Value":false} ] \ No newline at end of file diff --git a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 index 716fb799..cd464686 100644 --- a/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 +++ b/solution/DeploymentV2/environments/vars/PreprocessEnvironment.ps1 @@ -48,7 +48,7 @@ Write-Debug "Preparing Environment: $Environment Using $FeatureTemplate Template #Prep Output Folder $newfolder = "./../../bin/environments/$Environment/" -$hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Name $newfolder) : ($F = "") +$hiddenoutput = !(Test-Path $newfolder) ? 
($F = New-Item -itemType Directory -Force -Name $newfolder) : ($F = "") (jsonnet "./common_vars_template.jsonnet" --tla-str featuretemplatename=$FeatureTemplate --tla-str environment=$Environment --tla-str gitDeploy=$gitDeploy ) | Set-Content($newfolder +"/common_vars.json") $obj = Get-Content ($newfolder + "/common_vars.json") | ConvertFrom-Json diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 61b668b8..139527e7 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -639,6 +639,14 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index ff266d1a..a7e03846 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -6,8 +6,8 @@ "domain": "microsoft.com", "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", - "resource_group_name": "gfh5", - "state_storage_account_name": "lockboxftteststate", + "resource_group_name": "gft6", + "state_storage_account_name": "gft6state", "ip_address": "144.138.148.220", "ip_address2": "144.138.148.220", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", diff --git a/solution/Synapse/Patterns/FuncAppTests_Generate.ps1 b/solution/Synapse/Patterns/FuncAppTests_Generate.ps1 index 6cfb6bc4..ad4255ad 100644 --- a/solution/Synapse/Patterns/FuncAppTests_Generate.ps1 +++ b/solution/Synapse/Patterns/FuncAppTests_Generate.ps1 @@ -19,7 +19,7 @@ foreach ($pattern in $patterns) { if (!(Test-Path "./tests")) { - New-Item -itemType Directory -Name "tests" + New-Item -itemType Directory -Name "tests" -Force } else { diff 
--git a/solution/Synapse/Patterns/Jsonnet_GenerateADFArtefacts.ps1 b/solution/Synapse/Patterns/Jsonnet_GenerateADFArtefacts.ps1 index 97093645..eaf8ed21 100644 --- a/solution/Synapse/Patterns/Jsonnet_GenerateADFArtefacts.ps1 +++ b/solution/Synapse/Patterns/Jsonnet_GenerateADFArtefacts.ps1 @@ -107,11 +107,11 @@ foreach ($patternFolder in $patternFolders) Write-Verbose "_____________________________" $newfolder = ($schemafile.SourceFolder + "/output") - $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Name $newfolder) : ($F = "") + $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Name $newfolder -Force) : ($F = "") $newfolder = ($newfolder + "/schemas") - $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Name $newfolder) : ($F = "") + $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Name $newfolder -Force) : ($F = "") $newfolder = ($newfolder + "/taskmasterjson/") - $hiddenoutput = !(Test-Path $newfolder) ? ($F = New-Item -itemType Directory -Name $newfolder) : ($F = "") + $hiddenoutput = !(Test-Path $newfolder) ? 
($F = New-Item -itemType Directory -Name $newfolder -Force) : ($F = "") $schemafile.TargetFolder = $newfolder $schemafiletemplate = (Get-ChildItem -Path ($schemafile.SourceFolder+"/jsonschema/") -Filter "Main.libsonnet") From c8bd5b95e90e35c3410ff38b286c8385740f3753 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 16 Aug 2022 21:45:11 +0800 Subject: [PATCH 132/151] Experimenting with Additional Layer 0 --- .../utilities/GitHubRunnerInstall.sh | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 solution/DeploymentV2/utilities/GitHubRunnerInstall.sh diff --git a/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh b/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh new file mode 100644 index 00000000..5abb4b0f --- /dev/null +++ b/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh @@ -0,0 +1,19 @@ +sudo apt-get update && \ +sudo apt-get install -y wget apt-transport-https software-properties-common && \ +wget -q https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb && \ +sudo dpkg -i packages-microsoft-prod.deb && \ +sudo apt-get update && \ +sudo apt-get install -y powershell && \ +rm ./packages-microsoft-prod.deb && \ +sudo apt install -y dotnet-sdk-6.0 && \ +wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb && \ +sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb && \ +sudo rm jsonnet-go_0.17.0_linux_amd64.deb && \ +curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add - && \ +sudo apt-add-repository "deb [arch=amd64] https://apt.releases.hashicorp.com $(lsb_release -cs) main" && \ +sudo apt-get update && sudo apt-get install terraform && \ +wget https://github.com/gruntwork-io/terragrunt/releases/download/v0.35.14/terragrunt_linux_amd64 && \ +sudo mv terragrunt_linux_amd64 terragrunt && \ +sudo chmod u+x terragrunt && \ +sudo mv terragrunt /usr/local/bin/terragrunt && \ +curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bashaz \ No 
newline at end of file From f742feba45392385ceef82d09cf999e67dd71b40 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Tue, 16 Aug 2022 21:45:39 +0800 Subject: [PATCH 133/151] Experimenting with layer 0 --- solution/DeploymentV2/Prepare.ps1 | 29 +- .../vars/common_vars_template.jsonnet | 2 + .../DeploymentV2/terraform_layer0/.gitignore | 39 + .../terraform_layer0/.terraform.lock.hcl | 62 ++ .../terraform_layer0/00-deploy.ps1 | 80 ++ .../terraform_layer0/02-publish.ps1 | 109 +++ .../DeploymentV2/terraform_layer0/bastion.tf | 29 + .../DeploymentV2/terraform_layer0/locals.tf | 119 +++ .../DeploymentV2/terraform_layer0/main.tf | 52 + .../terraform_layer0/nsg_app_service.tf | 53 ++ .../terraform_layer0/nsg_bastion.tf | 207 ++++ .../terraform_layer0/nsg_plink.tf | 53 ++ .../DeploymentV2/terraform_layer0/nsg_vms.tf | 51 + .../DeploymentV2/terraform_layer0/outputs.tf | 0 .../terraform_layer0/private_dns.tf | 176 ++++ .../DeploymentV2/terraform_layer0/readme.md | 101 ++ .../DeploymentV2/terraform_layer0/subnet.tf | 63 ++ .../DeploymentV2/terraform_layer0/vars.tf | 896 ++++++++++++++++++ .../terraform_layer0/vars/admz/terragrunt.hcl | 83 ++ .../vars/local/terragrunt.hcl | 45 + .../vars/production/terragrunt.hcl | 45 + .../vars/staging/terragrunt.hcl | 43 + .../terraform_layer0/vars/uat/terragrunt.hcl | 44 + .../terraform_layer0/virtual_machines.tf | 54 ++ .../DeploymentV2/terraform_layer0/vnet.tf | 15 + 25 files changed, 2444 insertions(+), 6 deletions(-) create mode 100644 solution/DeploymentV2/terraform_layer0/.gitignore create mode 100644 solution/DeploymentV2/terraform_layer0/.terraform.lock.hcl create mode 100644 solution/DeploymentV2/terraform_layer0/00-deploy.ps1 create mode 100644 solution/DeploymentV2/terraform_layer0/02-publish.ps1 create mode 100644 solution/DeploymentV2/terraform_layer0/bastion.tf create mode 100644 solution/DeploymentV2/terraform_layer0/locals.tf create mode 100644 solution/DeploymentV2/terraform_layer0/main.tf create mode 100644 
solution/DeploymentV2/terraform_layer0/nsg_app_service.tf create mode 100644 solution/DeploymentV2/terraform_layer0/nsg_bastion.tf create mode 100644 solution/DeploymentV2/terraform_layer0/nsg_plink.tf create mode 100644 solution/DeploymentV2/terraform_layer0/nsg_vms.tf create mode 100644 solution/DeploymentV2/terraform_layer0/outputs.tf create mode 100644 solution/DeploymentV2/terraform_layer0/private_dns.tf create mode 100644 solution/DeploymentV2/terraform_layer0/readme.md create mode 100644 solution/DeploymentV2/terraform_layer0/subnet.tf create mode 100644 solution/DeploymentV2/terraform_layer0/vars.tf create mode 100644 solution/DeploymentV2/terraform_layer0/vars/admz/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer0/vars/local/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer0/vars/production/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer0/vars/staging/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer0/vars/uat/terragrunt.hcl create mode 100644 solution/DeploymentV2/terraform_layer0/virtual_machines.tf create mode 100644 solution/DeploymentV2/terraform_layer0/vnet.tf diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index 16491905..fe10bdb9 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -187,13 +187,30 @@ else { Write-Host "Creating Private Storage" #Private - $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --allow-blob-public-access false --public-network-access Disabled --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors + $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --pr --allow-blob-public-access false --public-network-access Disabled --https-only true 
--min-tls-version TLS1_2 --query id -o tsv --only-show-errors - $DeploymentVnet = Read-Host "Please input the name of the vnet from which deployment activities will originate." - $DeploymentRg = Read-Host "Please input the resource group of the vnet from which deployment activities will originate." - $output = az network private-dns zone create --resource-group $env:TF_VAR_resource_group_name --name "privatelink.blob.core.windows.net" - $output = az network private-endpoint create -g $DeploymentRg -n $env:TF_VAR_state_storage_account_name --vnet-name $DeploymentVnet --subnet default --private-connection-resource-id "/subscriptions/$env:TF_VAR_subscription_id/resourceGroups/$env:TF_VAR_resource_group_name/providers/Microsoft.Storage/storageAccounts/gft6state" --connection-name tttt -l australiaeast --group-id dfs - $output = az network private-dns link vnet create --name "adscore.privatelink.blob.core.windows.net" --registration-enabled false --resource-group $env:TF_VAR_resource_group_name --virtual-network "/subscriptions/$env:TF_VAR_subscription_id/resourceGroups/$DeploymentRg/providers/Microsoft.Network/virtualNetworks/$DeploymentVnet" --zone-name "privatelink.blob.core.windows.net" + $DeploymentVnet = Read-Host "Please input the name of the spoke vnet for the deployment. 
If you leave it blank it will default to 'ads-stg-vnet-ads'" + if([string]::IsNullOrEmpty($DeploymentVnet)) + { + $DeploymentVnet = "ads-stg-vnet-ads" + } + + #Create the VNET + $output = az network vnet create --name $DeploymentVnet --resource-group $env:TF_VAR_resource_group_name --address-prefixes "10.0.0.0/24" --subnet-name ads-stg-snet-ads-vm --subnet-prefixes 10.0.0.192/26 + + #Create Private Endpoint for DFS + $output = az network private-endpoint create -g $env:TF_VAR_resource_group_name -n $env:TF_VAR_state_storage_account_name --vnet-name $DeploymentVnet --subnet ads-stg-snet-ads-vm --private-connection-resource-id "/subscriptions/$env:TF_VAR_subscription_id/resourceGroups/$env:TF_VAR_resource_group_name/providers/Microsoft.Storage/storageAccounts/$env:TF_VAR_state_storage_account_name" --connection-name "$env:TF_VAR_state_storage_account_name-dfs-plink" -l australiaeast --group-id dfs --zone-name "privatelink.dfs.core.windows.net" + + + #DFS Zone and Vnet Link + $output = az network private-dns zone create --resource-group $env:TF_VAR_resource_group_name --name "privatelink.dfs.core.windows.net" + $output = az network private-endpoint dns-zone-group create --endpoint-name "$env:TF_VAR_state_storage_account_name" -g $env:TF_VAR_resource_group_name -n "privatednszonegroupstoragedfs" --zone-name "privatelink.dfs.core.windows.net" --private-dns-zone "privatelink.dfs.core.windows.net" + $output = az network private-dns link vnet create --name "privatelink.dfs.core.windows.net" --registration-enabled false --resource-group $env:TF_VAR_resource_group_name --virtual-network "/subscriptions/$env:TF_VAR_subscription_id/resourceGroups/$env:TF_VAR_resource_group_name/providers/Microsoft.Network/virtualNetworks/$DeploymentVnet" --zone-name "privatelink.dfs.core.windows.net" + + #Add Resources to Zones + $storageip = ((az network private-endpoint show --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name) | ConvertFrom-Json 
-depth 10).customDnsConfigs.ipAddresses + az network private-dns record-set a create -g $env:TF_VAR_resource_group_name -z "privatelink.dfs.core.windows.net" -n "$env:TF_VAR_state_storage_account_name" --ttl 10 + $output = az network private-dns record-set a add-record -g $env:TF_VAR_resource_group_name -z "privatelink.dfs.core.windows.net" -n "$env:TF_VAR_state_storage_account_name" -a $storageip } Write-Host "Creating Role Assignment" $userObjectId = az ad signed-in-user show --query id -o tsv --only-show-errors diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 139527e7..9b615196 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -653,6 +653,8 @@ local SecretFileSensitiveVars = { // Object comprehension. + + diff --git a/solution/DeploymentV2/terraform_layer0/.gitignore b/solution/DeploymentV2/terraform_layer0/.gitignore new file mode 100644 index 00000000..cc143939 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/.gitignore @@ -0,0 +1,39 @@ +# Local .terraform directories +**/.terraform/* + +**/arkahna/* + +# .tfstate files +*.tfstate +*.tfstate.* + +# Crash log files +crash.log + +# Exclude all .tfvars files, which are likely to contain sentitive data, such as +# password, private keys, and other secrets. These should not be part of version +# control as they are data points which are potentially sensitive and subject +# to change depending on the environment. 
+# +*.tfvars + +# Ignore override files as they are usually used to override resources locally and so +# are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Include override files you do wish to add to version control using negated pattern +# +# !example_override.tf + +# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan +# example: *tfplan* + +# Ignore CLI configuration files +.terraformrc +terraform.rc + + +backend.tf diff --git a/solution/DeploymentV2/terraform_layer0/.terraform.lock.hcl b/solution/DeploymentV2/terraform_layer0/.terraform.lock.hcl new file mode 100644 index 00000000..8ca5626b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/.terraform.lock.hcl @@ -0,0 +1,62 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/azuread" { + version = "2.22.0" + constraints = "2.22.0" + hashes = [ + "h1:so17lrrqkdZcmQp5V/hvY5vLXw1BmwQMnlvGcRq/u0c=", + "zh:062d84c514cd5015af60693ca4f3aece80d358fd7172951546eaba8093065c5b", + "zh:13749654ccd901408c74de2e1d7de43157044c4e739edcc0a66012a6cc6bba7a", + "zh:138c107f6aa554924a241806bca69248af1b7ce79ec93c6eef369886f33eef0a", + "zh:1c3e89cf19118fc07d7b04257251fc9897e722c16e0a0df7b07fcd261f8c12e7", + "zh:33c656e07492808da0584717a3cd52377dff15ae0f1f5f411321b8de08a7693e", + "zh:4e08570e51742e717a914db5dd15c0a73cd1686e0c1f1a07123d3aa70cc00718", + "zh:4fef3aca24238cead0798d29196c9e2270622091897dba040c21500c2ddb4095", + "zh:614c60e3dfdd17b7d93b9355e057c825bb36e61f5bc25ccbc6550ff7bd726b65", + "zh:65d8789b8b088322d4e27ea6cd9935749980fe0a1b94e8e56f0cca35c34c394e", + "zh:823abd9bbd9f42bc4c5769be033bf734bb81bb20152b7e1c009a6234b849e5b6", + "zh:9c7ece6b3c65253bfef6ee29acc0cac033ec061bd6755c5496a7e5c17997c918", + "zh:fc0ff3e3104ee6e89c2fa3bf6c83ba698062e64165b60acfe7ad00f2161d1250", + ] +} + +provider "registry.terraform.io/hashicorp/azurerm" { + version = 
"3.12.0" + constraints = "3.12.0" + hashes = [ + "h1:KF6bIhK7POPuO1HYK1G8b5Fae+0n8c/gM+9EBVJJQ2Q=", + "zh:0bbc93276a38da205d2b8ce34a2f813e96687a2f6fc7addd9bb05b85dab2a662", + "zh:3af12159e0b5217a7b35f081fba1e34ac8fb995acc7e6d2ec86592a559eb85c8", + "zh:7d1bdc9b4d9b1990409d52cb915e5acbe17bd81b29d28f7fcdaaf96003dca77c", + "zh:81ab77524cfa91aed929e35e2ed63b2ac73add7c33d1b3d5cdc21937606ecc7c", + "zh:84ddddd9f4c695199ef2824eea853d29434e164e0ef3603451aed39d8852ba28", + "zh:9905a5ca2d7c5c6e43a4be1f7b207d584860ec4ddad1aaa475fb03a731333751", + "zh:9cdf3223d9f4a2dbabcd1ebc663beab356a4ee5b1f584088772da8271c74890b", + "zh:a8317436ec286aae91d9bfbcd36edb9b0b58c195a9cd0adffb7f588f124bef1e", + "zh:cea079d3f4eff9e301ca207c7ce676553f9acc3202abf88ff161d6faa1e1a54a", + "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", + "zh:fdaa4de7d6713bd8b1b4a51135c9eadbaa317ea87e7af9c00c52f67018fba288", + "zh:ff25a0a5fb54174a8a37d4e40413fa85737d8bb837c5635b6e88621c36c202bd", + ] +} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.3.0" + constraints = ">= 2.2.0, 3.3.0" + hashes = [ + "h1:4VU/t0rwHuvJI0JZ3Zd93uEWaKIWeXqKx1GhAhgTn6A=", + "zh:0148a1a98ddbc3cf6ad6ef7bb4e5a2820ca50fdb8723d4448a011bfabb6f3d7c", + "zh:1f8c6d2046d6ea626c7abcfca2fbb95dce21663053a098570ebef71433f4a001", + "zh:3681788777b6b191edc5d2aeaece6217f36c1f92fcd2478bf804185f9fc48f9f", + "zh:3e8f7ae388fe981f86b5f6d4636e2b8ddb98b4cec63330f24b04c408ca338fa3", + "zh:3eb6fadea3a905a3e8be63cf3fd9c2dc1a885a8a4d67ac6945b4e562b22ce2d5", + "zh:46761443b5a83bce53a9e8dbb88a60ee260b1825f6e265dfb8865b9ab552ef0b", + "zh:59edb583bfe9ae60023289c570e62a87c86649341fd5e1042adc592334459967", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:8c46658d69142562984be3c6aa9ea29b2d389f8c991197b722c550e8a34fe49c", + "zh:9923e10598c76078cd6b67962aeb0c65160273e4fb36134a994003d1e7375200", + "zh:d528eb4854d5fb529934e0de3b57d33bf8a19db302c5cba6e8292e674291aaeb", + 
"zh:e9be013d175b21debee2b626574883aa579e4b03a085ca4e4122dd6ae2ffec53", + ] +} diff --git a/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 b/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 new file mode 100644 index 00000000..6136da87 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 @@ -0,0 +1,80 @@ +#---------------------------------------------------------------------------------------------------------------- +# You must be logged into the Azure CLI to run this script +#---------------------------------------------------------------------------------------------------------------- +# This script will: +# - Deploy the required AAD objects (Application Registrations etc) +# +# This is intended for creating a once off deployment from your development machine. You should setup the +# GitHub actions for your long term prod/non-prod environments +# +# Intructions +# - Ensure that you have run the Prepare.ps1 script first. This will prepare your azure subscription for deployment +# - Ensure that you have run az login and az account set +# - Ensure you have Contributor Access to the subscription you are deploying to. +# - Ensure you have Application.ReadWrite.OwnedBy on the Azure AD. +# - Run this script +# +# You can run this script multiple times if needed. 
+# +#---------------------------------------------------------------------------------------------------------------- +param ( + [Parameter(Mandatory=$false)] + [string]$FeatureTemplate="" +) + +#------------------------------------------------------------------------------------------------------------ +# Module Imports #Mandatory +#------------------------------------------------------------------------------------------------------------ +import-Module ./../pwshmodules/GatherOutputsFromTerraform.psm1 -force +import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force +import-Module ./../pwshmodules/ProcessTerraformApply.psm1 -force +#------------------------------------------------------------------------------------------------------------ +# Preparation #Mandatory +#------------------------------------------------------------------------------------------------------------ +$PathToReturnTo = (Get-Location).Path +$deploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../') +$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') +$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') +$ipaddress = $env:TF_VAR_ip_address +$ipaddress2 = $env:TF_VAR_ip_address2 + +PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo + +#------------------------------------------------------------------------------------------------------------ +# Main Terraform - Layer1 +#------------------------------------------------------------------------------------------------------------ +Write-Host "Starting Terraform Deployment- Layer 0" + + +$output = terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure +$output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json #-var 
synapse_sql_password=$env:TF_VAR_synapse_sql_password + +ProcessTerraformApply -output $output -gitDeploy $gitDeploy + + +#Update Values for variables in Environment +#[Environment]::SetEnvironmentVariable("TF_VAR_state_storage_account_name", $Value) +$tout_raw = ((az storage blob download -c "tstate" -n "terraform_layer2.tfstate" --account-name $env:TF_VAR_state_storage_account_name --auth-mode login) | ConvertFrom-Json).outputs + + +#conditional +if(-not (([string]::IsNullOrEmpty($tout_raw.adlsstorage_name.value)) -or ([string]::IsNullOrEmpty($tout_raw.keyvault_name.value)) -or([string]::IsNullOrEmpty($tout_raw.synapse_workspace_name.value)) ) ) +{ + Write-Host "Writing ARM_DATALAKE_NAME / ARM_KEYVAULT_NAME / ARM_SYNAPSE_WORKSPACE_NAME to common vars environment file" + $envFolderPath = Convert-Path -Path ($deploymentFolderPath + "./environments/vars/$env:environmentName/") + $varsfile = $envFolderPath + "/common_vars_values.jsonc" + $common_vars_values = Get-Content $varsfile | ConvertFrom-Json -Depth 10 + $common_vars_values.ARM_DATALAKE_NAME = $tout_raw.adlsstorage_name.value + $common_vars_values.ARM_KEYVAULT_NAME = $tout_raw.keyvault_name.value + $common_vars_values.ARM_SYNAPSE_WORKSPACE_NAME = $tout_raw.synapse_workspace_name.value + $common_vars_values | Convertto-Json -Depth 10 | Set-Content $varsfile +} +else +{ + Write-Host "Not writing ARM_DATALAKE_NAME / ARM_KEYVAULT_NAME / ARM_SYNAPSE_WORKSPACE_NAME to common vars environment file" + Write-Host "ARM_DATALAKE_NAME =" $tout_raw.adlsstorage_name.value + Write-Host "ARM_KEYVAULT_NAME =" $tout_raw.keyvault_name.value + Write-Host "ARM_SYNAPSE_WORKSPACE_NAME =" $tout_raw.synapse_workspace_name.value +} + + diff --git a/solution/DeploymentV2/terraform_layer0/02-publish.ps1 b/solution/DeploymentV2/terraform_layer0/02-publish.ps1 new file mode 100644 index 00000000..68cf6307 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/02-publish.ps1 @@ -0,0 +1,109 @@ 
+#---------------------------------------------------------------------------------------------------------------- +# You must be logged into the Azure CLI to run this script +#---------------------------------------------------------------------------------------------------------------- +# This script will: +# - Deploy the required AAD objects (Application Registrations etc) +# +# This is intended for creating a once off deployment from your development machine. You should setup the +# GitHub actions for your long term prod/non-prod environments +# +# Intructions +# - Ensure that you have run the Prepare.ps1 script first. This will prepare your azure subscription for deployment +# - Ensure that you have run az login and az account set +# - Ensure you have Contributor Access to the subscription you are deploying to. +# - Ensure you have Application.ReadWrite.OwnedBy on the Azure AD. +# - Run this script +# +# You can run this script multiple times if needed. +# +#---------------------------------------------------------------------------------------------------------------- +param ( + [Parameter(Mandatory=$false)] + [string]$FeatureTemplate="" +) + + + +#------------------------------------------------------------------------------------------------------------ +# Module Imports #Mandatory +#------------------------------------------------------------------------------------------------------------ +import-Module ./../pwshmodules/GatherOutputsFromTerraform.psm1 -force +import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force +#------------------------------------------------------------------------------------------------------------ +# Preparation #Mandatory +#------------------------------------------------------------------------------------------------------------ +$PathToReturnTo = (Get-Location).Path +$deploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../') + +$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 
'true') +$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') +$ipaddress = $env:TF_VAR_ip_address +$ipaddress2 = $env:TF_VAR_ip_address2 + +PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo + +if($env:TF_VAR_deployment_layer3_complete -eq $false -or $null -eq $env:TF_VAR_deployment_layer3_complete) +{ + Write-Error "Layer 3 Deployment is not complete. Code will now exit. Run terraform layer 3 for this deployment before running this layer (layer two) again." + exit +} + +#------------------------------------------------------------------------------------------------------------ +# Get Outputs #Mandatory +#------------------------------------------------------------------------------------------------------------ +$tout = GatherOutputsFromTerraform -TerraformFolderPath $PathToReturnTo + +#------------------------------------------------------------------------------------------------------------ +# Publish +#------------------------------------------------------------------------------------------------------------ +import-Module ./../pwshmodules/Deploy_4_PrivateLinks.psm1 -force +DeployPrivateLinks -tout $tout + +import-Module ./../pwshmodules/Deploy_5_WebApp.psm1 -force +DeployWebApp -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo + +import-Module ./../pwshmodules/Deploy_6_FuncApp.psm1 -force +DeployFuncApp -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo + +import-Module ./../pwshmodules/Deploy_7_MetadataDB.psm1 -force +DeployMataDataDB -publish_metadata_database $true -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo + +import-Module ./../pwshmodules/Deploy_9_DataFactory.psm1 -force +DeployDataFactoryAndSynapseArtefacts -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo 
$PathToReturnTo + +import-Module ./../pwshmodules/Deploy_10_SampleFiles.psm1 -force +DeploySampleFiles -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo + +#import-Module ./../pwshmodules/ConfigureAzurePurview.psm1 -force +#ConfigureAzurePurview -tout $tout + + +#---------------------------------------------------------------------------------------------------------------- +# Set up Purview +#---------------------------------------------------------------------------------------------------------------- +# This is a WIP - not recommended to use for standard user +#---------------------------------------------------------------------------------------------------------------- +# +if($skipConfigurePurview -or $null -eq $skipConfigurePurview) { + Write-Host "Skipping experimental Purview Configuration" +} +else { + Write-Host "Running Purview Configuration (experimental) Script" + Set-Location $deploymentFolderPath + Invoke-Expression ./ConfigureAzurePurview.ps1 +} + + +#---------------------------------------------------------------------------------------------------------------- +# Deploy Functional Tests +#---------------------------------------------------------------------------------------------------------------- +# This is for development purposes primarily - If using, understand these may not be all working with most recent platform version as tests can become outdated / missing new required fields. 
+#---------------------------------------------------------------------------------------------------------------- +if($skipFunctionalTests -or $null -eq $skipFunctionalTests) { + Write-Host "Skipping Functional Tests Upload" +} +else { + Write-Host "Deploying Functional Tests to Web App" + Set-Location $deploymentFolderPath + Invoke-Expression ./GenerateAndUploadFunctionalTests.ps1 +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer0/bastion.tf b/solution/DeploymentV2/terraform_layer0/bastion.tf new file mode 100644 index 00000000..1d7b9073 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/bastion.tf @@ -0,0 +1,29 @@ +resource "azurerm_public_ip" "bastion_pip" { + count = (var.is_vnet_isolated ? 1 : 0) + name = local.bastion_ip_name + location = var.resource_location + resource_group_name = var.resource_group_name + allocation_method = "Static" + sku = "Standard" +} + +resource "azurerm_bastion_host" "bastion" { + count = (var.is_vnet_isolated && var.deploy_bastion? 1 : 0) + name = local.bastion_name + location = var.resource_location + resource_group_name = var.resource_group_name + + ip_configuration { + name = "configuration" + subnet_id = local.bastion_subnet_id + public_ip_address_id = azurerm_public_ip.bastion_pip[0].id + } + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + + diff --git a/solution/DeploymentV2/terraform_layer0/locals.tf b/solution/DeploymentV2/terraform_layer0/locals.tf new file mode 100644 index 00000000..c2e0c0e9 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/locals.tf @@ -0,0 +1,119 @@ +locals { + data_factory_name = (var.data_factory_name != "" ? var.data_factory_name : module.naming.data_factory.name_unique) + key_vault_name = (var.key_vault_name != "" ? var.key_vault_name : module.naming.key_vault.name_unique) + app_insights_name = (var.app_insights_name != "" ? 
var.app_insights_name : module.naming.application_insights.name_unique) + app_service_plan_name = (var.app_service_plan_name != "" ? var.app_service_plan_name : module.naming.app_service_plan.name_unique) + sql_server_name = (var.sql_server_name != "" ? var.sql_server_name : module.naming.sql_server.name_unique) + webapp_name = (var.webapp_name != "" ? var.webapp_name : module.naming.app_service.name_unique) + webapp_url = "https://${local.webapp_name}.azurewebsites.net" + functionapp_name = (var.functionapp_name != "" ? var.functionapp_name : module.naming.function_app.name_unique) + functionapp_url = "https://${local.functionapp_name}.azurewebsites.net" + aad_webapp_name = (var.aad_webapp_name != "" ? var.aad_webapp_name : "ADS GoFast Web Portal (${var.environment_tag})") + aad_functionapp_name = (var.aad_functionapp_name != "" ? var.aad_functionapp_name : "ADS GoFast Orchestration App (${var.environment_tag})") + vnet_name = (var.vnet_name != "" ? var.vnet_name : module.naming.virtual_network.name) + plink_subnet_name = (var.plink_subnet_name != "" ? var.plink_subnet_name : "${module.naming.subnet.name}-plink") + app_service_subnet_name = (var.app_service_subnet_name != "" ? var.plink_subnet_name : "${module.naming.subnet.name}-appservice") + vm_subnet_name = (var.vm_subnet_name != "" ? var.vm_subnet_name : "${module.naming.subnet.name}-vm") + logs_storage_account_name = (var.logs_storage_account_name != "" ? var.logs_storage_account_name : "${module.naming.storage_account.name_unique}log") + app_service_nsg_name = (var.app_service_nsg_name != "" ? var.app_service_nsg_name : "${module.naming.network_security_group.name}-appservice") + plink_nsg_name = (var.plink_nsg_name != "" ? var.plink_nsg_name : "${module.naming.network_security_group.name_unique}-plink") + bastion_nsg_name = (var.bastion_nsg_name != "" ? var.bastion_nsg_name : "${module.naming.network_security_group.name_unique}-bastion") + vm_nsg_name = (var.vm_nsg_name != "" ? 
var.vm_nsg_name : "${module.naming.network_security_group.name_unique}-vm") + log_analytics_workspace_name = (var.log_analytics_workspace_name != "" ? var.log_analytics_workspace_name : module.naming.log_analytics_workspace.name_unique) + metadata_database_name = "MetadataDb" + sample_database_name = "Samples" + staging_database_name = "Staging" + adls_storage_account_name = (var.adls_storage_account_name != "" ? var.adls_storage_account_name : "${module.naming.data_lake_store.name_unique}adsl") + blob_storage_account_name = (var.blob_storage_account_name != "" ? var.blob_storage_account_name : "${module.naming.data_lake_store.name_unique}blob") + bastion_name = (var.bastion_name != "" ? var.bastion_name : module.naming.bastion_host.name_unique) + bastion_ip_name = (var.bastion_ip_name != "" ? var.bastion_ip_name : module.naming.public_ip.name_unique) + purview_name = (var.purview_name != "" ? var.purview_name : "${var.prefix}${var.environment_tag}pur${var.app_name}${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_account_plink = (var.purview_name != "" ? var.purview_name : "${var.prefix}-${var.environment_tag}-pura-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_portal_plink = (var.purview_name != "" ? var.purview_name : "${var.prefix}-${var.environment_tag}-purp-${lower(var.app_name)}-plink-${element(split("-", module.naming.data_factory.name_unique),length(split("-", module.naming.data_factory.name_unique))-1)}") + purview_resource_group_name = "managed-${module.naming.resource_group.name_unique}-purview" + purview_ir_app_reg_name = (var.purview_ir_app_reg_name != "" ? 
var.purview_ir_app_reg_name : "ADS GoFast Purview Integration Runtime (${var.environment_tag})") + jumphost_vm_name = module.naming.virtual_machine.name + jumphost_nic_name = "${module.naming.virtual_machine.name}-jumphost_nic" + jumphost_password = ((var.is_vnet_isolated && var.jumphost_password == null) ? "" : var.jumphost_password) + synapse_data_lake_name = (var.synapse_data_lake_name != "" ? var.synapse_data_lake_name : module.naming.data_lake_store.name_unique) + synapse_workspace_name = (var.synapse_workspace_name != "" ? var.synapse_workspace_name : "${var.prefix}${var.environment_tag}synw${var.app_name}${element(split("-", module.naming.data_factory.name_unique), length(split("-", module.naming.data_factory.name_unique)) - 1)}") + synapse_dwpool_name = (var.synapse_dwpool_name != "" ? var.synapse_dwpool_name : "${var.prefix}${var.environment_tag}syndp${var.app_name}") + synapse_sppool_name = (var.synapse_sppool_name != "" ? var.synapse_sppool_name : "${var.prefix}${var.environment_tag}synsp${var.app_name}") + synapse_resource_group_name = "managed-${module.naming.resource_group.name_unique}-synapse" + synapse_sql_password = ((var.deploy_synapse && var.synapse_sql_password == null) ? 
"" : var.synapse_sql_password) + selfhostedsqlvm_name = replace(module.naming.virtual_machine.name,"-vm-ads","-vm-sql") + h2o-ai_name = replace(module.naming.virtual_machine.name,"-vm-ads","-vm-h2o") + custom_vm_name = replace(module.naming.virtual_machine.name,"-vm-ads","-vm-custom") + + tags = { + Environment = var.environment_tag + Owner = var.owner_tag + Author = var.author_tag + Application = var.app_name + CreatedDate = timestamp() + } + + integration_runtimes = [ + { + name = "Azure-Integration-Runtime" + short_name = "Azure" + is_azure = true + is_managed_vnet = true + valid_source_systems = ["*"] + valid_pipeline_patterns = [ + { + Folder = "*" + SourceFormat = "*" + SourceType = "*" + TargetFormat = "*" + TargetType = "*" + TaskTypeId = "*" + } + ] + }, + { + name = "Onprem-Integration-Runtime" + short_name = "OnPrem" + is_azure = false + is_managed_vnet = false + valid_source_systems = ["-14", "-15", "-9", "-3", "-4"] + valid_pipeline_patterns = [ + { + Folder = "Azure-Storage-to-Azure-Storage" + SourceFormat = "*" + SourceType = "*" + TargetFormat = "*" + TargetType = "*" + TaskTypeId = "*" + }, + { + Folder = "Execute-SQL-Statement" + SourceFormat = "*" + SourceType = "*" + TargetFormat = "*" + TargetType = "*" + TaskTypeId = "*" + }, + { + Folder = "SQL-Database-to-Azure-Storage" + SourceFormat = "*" + SourceType = "*" + TargetFormat = "*" + TargetType = "*" + TaskTypeId = "*" + }, + { + Folder = "SQL-Database-to-Azure-Storage-CDC" + SourceFormat = "*" + SourceType = "*" + TargetFormat = "*" + TargetType = "*" + TaskTypeId = "*" + } + + ] + } + ] +} + + diff --git a/solution/DeploymentV2/terraform_layer0/main.tf b/solution/DeploymentV2/terraform_layer0/main.tf new file mode 100644 index 00000000..1d5d2bdd --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/main.tf @@ -0,0 +1,52 @@ +# Configure the Azure provider +terraform { + required_providers { + azurerm = { + source = "hashicorp/azurerm" + version = "=3.12.0" + } + azuread = { + source = 
"hashicorp/azuread" + version = "=2.22.0" + } + random = { + source = "hashicorp/random" + version = "=3.3.0" + } + } +} + +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = true + } + } + subscription_id = var.subscription_id + skip_provider_registration = true +} + +provider "azuread" { + tenant_id = var.tenant_id +} + +data "azurerm_client_config" "current" { +} + +module "naming" { + source = "Azure/naming/azurerm" + version = "0.1.1" + #unique-seed = data.terraform_remote_state.layer1.outputs.naming_unique_seed + prefix = [ + var.prefix, + var.environment_tag + ] + suffix = [ + var.app_name + ] +} + + +resource "random_id" "rg_deployment_unique" { + byte_length = 4 +} diff --git a/solution/DeploymentV2/terraform_layer0/nsg_app_service.tf b/solution/DeploymentV2/terraform_layer0/nsg_app_service.tf new file mode 100644 index 00000000..2c0c2421 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/nsg_app_service.tf @@ -0,0 +1,53 @@ +resource "azurerm_network_security_group" "app_service_nsg" { + count = (var.is_vnet_isolated && var.existing_app_service_subnet_id == "" ? 1 : 0) + name = local.app_service_nsg_name + location = var.resource_location + resource_group_name = var.resource_group_name + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# start inbound rules +resource "azurerm_network_security_rule" "app_service_in_deny_all" { + count = (var.is_vnet_isolated && var.existing_app_service_subnet_id == "" ? 
1 : 0) + name = "app_service_in_deny_all" + priority = 110 + direction = "Inbound" + access = "Deny" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_range = "*" + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.app_service_nsg[0].name + + depends_on = [ + azurerm_network_security_group.app_service_nsg[0], + ] +} +# end Inbound rules + +# start outbound rules + +# association +resource "azurerm_subnet_network_security_group_association" "app_service_nsg" { + count = (var.is_vnet_isolated && var.existing_app_service_subnet_id == "" ? 1 : 0) + subnet_id = local.app_service_subnet_id + network_security_group_id = azurerm_network_security_group.app_service_nsg[0].id + timeouts {} + # The subnet will refuse to accept the NSG if it's not this exact + # list so we need to ensure the rules are deployed before the association + depends_on = [ + azurerm_network_security_rule.app_service_in_deny_all[0], + azurerm_subnet.app_service_subnet[0], + ] +} diff --git a/solution/DeploymentV2/terraform_layer0/nsg_bastion.tf b/solution/DeploymentV2/terraform_layer0/nsg_bastion.tf new file mode 100644 index 00000000..55a2f5be --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/nsg_bastion.tf @@ -0,0 +1,207 @@ +resource "azurerm_network_security_group" "bastion_nsg" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = local.bastion_nsg_name + location = var.resource_location + resource_group_name = var.resource_group_name + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# Inbound Rules +resource "azurerm_network_security_rule" "bastion_inbound_internet" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 
1 : 0) + name = "inbound_internet_allow" + priority = 100 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "Internet" + + destination_port_range = "443" + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_inbound_control_plane" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "inbound_control_plane_allow" + priority = 110 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "GatewayManager" + + destination_port_range = "443" + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_inbound_data_plane" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "inbound_data_plane_allow" + priority = 120 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "VirtualNetwork" + + destination_port_ranges = ["8080", "5701"] + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_inbound_load_balancer" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 
1 : 0) + name = "inbound_load_balancer_allow" + priority = 130 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "AzureLoadBalancer" + + destination_port_range = "443" + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +#-------------------------------------------------------------------------------------------------------- +# Outbound Rules +resource "azurerm_network_security_rule" "bastion_outbound_bastion_vms" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "outbound_bastion_vnet_allow" + priority = 110 + direction = "Outbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_ranges = ["3389", "22"] + destination_address_prefix = "VirtualNetwork" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_outbound_dataplane" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 
1 : 0) + name = "bastion_outbound_dataplane_allow" + priority = 120 + direction = "Outbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_ranges = ["8080", "5701"] + destination_address_prefix = "VirtualNetwork" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_outbound_azure" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "bastion_outbound_azure_allow" + priority = 130 + direction = "Outbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_range = "443" + destination_address_prefix = "AzureCloud" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} +resource "azurerm_network_security_rule" "bastion_outbound_internet" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "bastion_outbound_internet_allow" + priority = 140 + direction = "Outbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_range = "80" + destination_address_prefix = "Internet" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.bastion_nsg[0].name + + depends_on = [ + azurerm_network_security_group.bastion_nsg[0], + ] +} + +# Associate NSG with subnet + +resource "azurerm_subnet_network_security_group_association" "bastion_nsg" { + count = (var.is_vnet_isolated && var.existing_bastion_subnet_id == "" ? 
1 : 0) + subnet_id = local.bastion_subnet_id + network_security_group_id = azurerm_network_security_group.bastion_nsg[0].id + + # The subnet will refuse to accept the NSG if it's not this exact + # list so we need to ensure the rules are deployed before the association + depends_on = [ + azurerm_network_security_rule.bastion_inbound_internet[0], + azurerm_network_security_rule.bastion_inbound_control_plane[0], + azurerm_network_security_rule.bastion_inbound_data_plane[0], + azurerm_network_security_rule.bastion_inbound_load_balancer[0], + azurerm_network_security_rule.bastion_outbound_bastion_vms[0], + azurerm_network_security_rule.bastion_outbound_dataplane[0], + azurerm_network_security_rule.bastion_outbound_azure[0], + azurerm_network_security_rule.bastion_outbound_internet[0], + azurerm_subnet.bastion_subnet[0], + ] + timeouts {} +} diff --git a/solution/DeploymentV2/terraform_layer0/nsg_plink.tf b/solution/DeploymentV2/terraform_layer0/nsg_plink.tf new file mode 100644 index 00000000..f50b6b4e --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/nsg_plink.tf @@ -0,0 +1,53 @@ +resource "azurerm_network_security_group" "plink_nsg" { + count = (var.is_vnet_isolated && var.existing_plink_subnet_id == "" ? 1 : 0) + name = local.plink_nsg_name + location = var.resource_location + resource_group_name = var.resource_group_name + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# Inbound Rules + +# Outbound Rules +resource "azurerm_network_security_rule" "plink_out_deny_all" { + count = (var.is_vnet_isolated && var.existing_plink_subnet_id == "" ? 
1 : 0) + name = "plink_out_deny_all" + priority = 110 + direction = "Outbound" + access = "Deny" + protocol = "*" + + source_port_range = "*" + source_address_prefix = "*" + + destination_port_range = "*" + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.plink_nsg[0].name + + depends_on = [ + azurerm_network_security_group.plink_nsg[0], + ] +} + +# Associate NSG with subnet + +resource "azurerm_subnet_network_security_group_association" "plink_nsg" { + count = (var.is_vnet_isolated && var.existing_plink_subnet_id == "" ? 1 : 0) + subnet_id = local.plink_subnet_id + network_security_group_id = azurerm_network_security_group.plink_nsg[0].id + + # The subnet will refuse to accept the NSG if it's not this exact + # list so we need to ensure the rules are deployed before the association + depends_on = [ + azurerm_network_security_rule.plink_out_deny_all[0], + azurerm_subnet.plink_subnet[0], + ] + timeouts {} +} diff --git a/solution/DeploymentV2/terraform_layer0/nsg_vms.tf b/solution/DeploymentV2/terraform_layer0/nsg_vms.tf new file mode 100644 index 00000000..76ec79ec --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/nsg_vms.tf @@ -0,0 +1,51 @@ +resource "azurerm_network_security_group" "vm_nsg" { + count = (var.is_vnet_isolated && var.existing_vm_subnet_id == "" ? 1 : 0) + name = local.vm_nsg_name + location = var.resource_location + resource_group_name = var.resource_group_name + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +# Inbound Rules +resource "azurerm_network_security_rule" "vm_inbound_bastion" { + count = (var.is_vnet_isolated && var.existing_vm_subnet_id == "" ? 
1 : 0) + name = "inbound_bastion_allow" + priority = 110 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_port_range = "*" + source_address_prefix = var.bastion_subnet_cidr + + destination_port_ranges = ["22", "3389"] + destination_address_prefix = "*" + + resource_group_name = var.resource_group_name + network_security_group_name = azurerm_network_security_group.vm_nsg[0].name + + depends_on = [ + azurerm_network_security_group.vm_nsg[0], + ] +} +# Outbound Rules + +# Associate NSG with subnet + +resource "azurerm_subnet_network_security_group_association" "vm_nsg" { + count = (var.is_vnet_isolated && var.existing_vm_subnet_id == "" ? 1 : 0) + subnet_id = local.vm_subnet_id + network_security_group_id = azurerm_network_security_group.vm_nsg[0].id + + # The subnet will refuse to accept the NSG if it's not this exact + # list so we need to ensure the rules are deployed before the association + depends_on = [ + azurerm_subnet.vm_subnet[0], + ] + timeouts {} +} diff --git a/solution/DeploymentV2/terraform_layer0/outputs.tf b/solution/DeploymentV2/terraform_layer0/outputs.tf new file mode 100644 index 00000000..e69de29b diff --git a/solution/DeploymentV2/terraform_layer0/private_dns.tf b/solution/DeploymentV2/terraform_layer0/private_dns.tf new file mode 100644 index 00000000..0ac8e32c --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/private_dns.tf @@ -0,0 +1,176 @@ +resource "azurerm_private_dns_zone" "private_dns_zone_db" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_db_id == "" ? 1 : 0) + name = "privatelink.database.windows.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "database" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_db_id == "" ? 
1 : 0) + name = "${local.vnet_name}-database" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_db[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +locals { + private_dns_zone_db_id = (var.is_vnet_isolated && var.existing_private_dns_zone_db_id == "" ? azurerm_private_dns_zone.private_dns_zone_db[0].id : var.existing_private_dns_zone_db_id) + private_dns_zone_kv_id = (var.is_vnet_isolated && var.existing_private_dns_zone_kv_id == "" ? azurerm_private_dns_zone.private_dns_zone_kv[0].id : var.existing_private_dns_zone_kv_id) + private_dns_zone_blob_id = (var.is_vnet_isolated && var.existing_private_dns_zone_blob_id == "" ? azurerm_private_dns_zone.private_dns_zone_blob[0].id : var.existing_private_dns_zone_blob_id) + private_dns_zone_queue_id = (var.is_vnet_isolated && var.existing_private_dns_zone_queue_id == "" ? azurerm_private_dns_zone.private_dns_zone_queue[0].id : var.existing_private_dns_zone_queue_id) + private_dns_zone_dfs_id = (var.is_vnet_isolated && var.existing_private_dns_zone_dfs_id == "" ? azurerm_private_dns_zone.private_dns_zone_dfs[0].id : var.existing_private_dns_zone_dfs_id) + private_dns_zone_purview_id = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_id == "" ? azurerm_private_dns_zone.private_dns_zone_purview[0].id : var.existing_private_dns_zone_purview_id) + private_dns_zone_purview_studio_id = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_studio_id == "" ? azurerm_private_dns_zone.private_dns_zone_purview_studio[0].id : var.existing_private_dns_zone_purview_studio_id) + private_dns_zone_servicebus_id = (var.is_vnet_isolated && var.existing_private_dns_zone_servicebus_id == "" ? azurerm_private_dns_zone.private_dns_zone_servicebus[0].id : var.existing_private_dns_zone_servicebus_id) + private_dns_zone_synapse_gateway_id = (var.is_vnet_isolated && var.existing_private_dns_zone_synapse_gateway_id == "" ? 
azurerm_private_dns_zone.synapse_gateway[0].id : var.existing_private_dns_zone_synapse_gateway_id) + private_dns_zone_synapse_studio_id = (var.is_vnet_isolated && var.existing_private_dns_zone_synapse_studio_id == "" ? azurerm_private_dns_zone.synapse_studio[0].id : var.existing_private_dns_zone_synapse_studio_id) + private_dns_zone_synapse_sql_id = (var.is_vnet_isolated && var.existing_private_dns_zone_synapse_sql_id == "" ? azurerm_private_dns_zone.synapse_sql[0].id : var.existing_private_dns_zone_synapse_sql_id) + +} + + +resource "azurerm_private_dns_zone" "private_dns_zone_kv" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_kv_id == "" ? 1 : 0) + name = "privatelink.vaultcore.azure.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "vaultcore" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_kv_id == "" ? 1 : 0) + name = "${local.vnet_name}-vaultcore" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_kv[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_private_dns_zone" "private_dns_zone_blob" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_blob_id == "" ? 1 : 0) + name = "privatelink.blob.core.windows.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "blob" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_blob_id == "" ? 
1 : 0) + name = "${local.vnet_name}-blob" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_blob[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "private_dns_zone_queue" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_queue_id == "" ? 1 : 0) + name = "privatelink.queue.core.windows.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "queue" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_queue_id == "" ? 1 : 0) + name = "${local.vnet_name}-queue" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_queue[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "private_dns_zone_dfs" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_dfs_id == "" ? 1 : 0) + name = "privatelink.dfs.core.windows.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "dfs" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_dfs_id == "" ? 1 : 0) + name = "${local.vnet_name}-dfs" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_dfs[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "private_dns_zone_purview" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_id == "" ? 1 : 0) + name = "privatelink.purview.azure.com" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "purview" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_id == "" ? 
1 : 0) + name = "${local.vnet_name}-purview" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_purview[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "private_dns_zone_purview_studio" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_studio_id == "" ? 1 : 0) + name = "privatelink.purviewstudio.azure.com" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "purview_studio" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_purview_studio_id == "" ? 1 : 0) + name = "${local.vnet_name}-purviewstudio" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_purview_studio[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "private_dns_zone_servicebus" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_servicebus_id == "" ? 1 : 0) + name = "privatelink.servicebus.windows.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "servicebus" { + count = (var.is_vnet_isolated && var.existing_private_dns_zone_servicebus_id == "" ? 1 : 0) + name = "${local.vnet_name}-servicebus" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.private_dns_zone_servicebus[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +# Synapse Private DNS Zones +resource "azurerm_private_dns_zone" "synapse_gateway" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_gateway_id == "" ? 
1 : 0) + name = "privatelink.azuresynapse.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "synapse_gateway" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_gateway_id == "" ? 1 : 0) + name = "${local.vnet_name}-synapsegateway" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.synapse_gateway[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "synapse_sql" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_sql_id == "" ? 1 : 0) + name = "privatelink.sql.azuresynapse.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "synapse_sql" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_sql_id == "" ? 1 : 0) + name = "${local.vnet_name}-synapsesql" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.synapse_sql[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} + +resource "azurerm_private_dns_zone" "synapse_studio" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_studio_id == "" ? 1 : 0) + name = "privatelink.dev.azuresynapse.net" + resource_group_name = var.resource_group_name +} + +resource "azurerm_private_dns_zone_virtual_network_link" "synapse_studio" { + count = (var.is_vnet_isolated && var.deploy_synapse && var.existing_private_dns_zone_synapse_studio_id == "" ? 
1 : 0) + name = "${local.vnet_name}-synapsestudio" + resource_group_name = var.resource_group_name + private_dns_zone_name = azurerm_private_dns_zone.synapse_studio[0].name + virtual_network_id = azurerm_virtual_network.vnet[0].id +} diff --git a/solution/DeploymentV2/terraform_layer0/readme.md b/solution/DeploymentV2/terraform_layer0/readme.md new file mode 100644 index 00000000..0f05e929 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/readme.md @@ -0,0 +1,101 @@ +# Setting up a new environment +This section describes how to set up a new environment. It is suggested that for development purposes, developers use their own unique development environment, i.e. a separate resource group, storage account and Terraform state file. + + +# Setting up Infrastructure from Local Machine +This describes how to run the Terraform configuration from your local machine. We will be running with local state and not persisting to a remote state store. + +## Run Terraform/Terragrunt locally +Log into environment as yourself and change to infra directory. This is a quick way to get going but doesn't test the permissions of the account which will be actually deploying. + +``` PowerShell +az login +az account set --subscription +``` + +``` PowerShell +terragrunt init --terragrunt-config vars/local/terragrunt.hcl -reconfigure +terragrunt plan --terragrunt-config vars/local/terragrunt.hcl +terragrunt apply --terragrunt-config vars/local/terragrunt.hcl +``` + +``` PowerShell +terragrunt init --terragrunt-config vars/staging/terragrunt.hcl -reconfigure +terragrunt plan --terragrunt-config vars/staging/terragrunt.hcl +terragrunt apply --terragrunt-config vars/staging/terragrunt.hcl +``` + +# Setting up Infrastructure for CI/CD managed environment +This describes how to run the Terraform configuration to create the state store for an environment. This should be run once per environment and provides the location for terraform state to be stored. 
+ +## Set up Terraform state +This will set up a resource group, storage account and container to be used for Terraform state. The same resource group will be used for deployed artefacts. + +Run PowerShell + +Log into environment. + +``` PowerShell +az login +az account set --subscription +``` + +Edit *infrastructure\state\create-state-store.ps1* so that *$RESOURCE_GROUP_NAME* and *$RESOURCE_GROUP_NAME* reflect the environment. + +Run the script to create the resources. + +## Set up Terragrunt config file +Set up the config file in location *infrastructure\vars\\terragrunt.hcl* + +Set *remote_state.config.resource_group_name* and *remote_state.config.storage_account_name* as appropriate for the environment, and point to the resource group and storage created above. + +Set up the *inputs* section to reflect the environment being deployed to. + + +## Init the state for the environment +Run PowerShell + +Log into environment and change to infra directory. + +``` PowerShell +az login +az account set --subscription +cd infrastructure +``` + +Initialise state + +``` PowerShell +cd infrastructure +terragrunt init --terragrunt-config vars/development/terragrunt.hcl +``` + +## If you need to import existing resources + + +1. Grant you service principal rights to the resources. +eg. $assignment = az role assignment create --role "Owner" --assignee 4c732d19-4076-4a76-87f3-6fbfd77f007d --resource-group "gft2" + +az ad app owner add --id db2c4f38-1566-41af-a1d4-495cd59097cc --owner-object-id 4c732d19-4076-4a76-87f3-6fbfd77f007d +az ad app owner add --id d2e89752-2e75-48ba-a5a7-cb4bbc7bcfc8 --owner-object-id 4c732d19-4076-4a76-87f3-6fbfd77f007d + + + +2. 
Then import resources into state + +terraform import azuread_application.web_reg[0] 497fb46f-3d88-4445-b9e8-7065970e3b40 +terraform import azuread_application.function_app_reg[0] db2c4f38-1566-41af-a1d4-495cd59097cc + + +# Required Azure resource providers +Microsoft.Storage +Microsoft.Network +Microsoft.Web +microsoft.insights +Microsoft.ManagedIdentity +Microsoft.KeyVault +Microsoft.OperationalInsights +Microsoft.Purview +Microsoft.EventHub +Microsoft.Compute + diff --git a/solution/DeploymentV2/terraform_layer0/subnet.tf b/solution/DeploymentV2/terraform_layer0/subnet.tf new file mode 100644 index 00000000..4b73c126 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/subnet.tf @@ -0,0 +1,63 @@ +resource "azurerm_subnet" "plink_subnet" { + count = (var.is_vnet_isolated && var.existing_plink_subnet_id == "" ? 1 : 0) + name = local.plink_subnet_name + resource_group_name = var.resource_group_name + virtual_network_name = azurerm_virtual_network.vnet[0].name + address_prefixes = [var.plink_subnet_cidr] + enforce_private_link_endpoint_network_policies = true +} + +locals { + plink_subnet_id = (var.existing_plink_subnet_id == "" && (var.is_vnet_isolated) ? azurerm_subnet.plink_subnet[0].id : var.existing_plink_subnet_id) +} + +resource "azurerm_subnet" "bastion_subnet" { + count = (var.is_vnet_isolated && var.deploy_bastion && var.existing_bastion_subnet_id == "" ? 1 : 0) + name = "AzureBastionSubnet" + resource_group_name = var.resource_group_name + virtual_network_name = azurerm_virtual_network.vnet[0].name + address_prefixes = [var.bastion_subnet_cidr] + enforce_private_link_endpoint_network_policies = true +} + +locals { + bastion_subnet_id = (var.existing_bastion_subnet_id == "" && (var.is_vnet_isolated) && var.deploy_bastion ? 
azurerm_subnet.bastion_subnet[0].id : var.existing_bastion_subnet_id) +} + +resource "azurerm_subnet" "vm_subnet" { + count = (var.is_vnet_isolated || (var.deploy_selfhostedsql || var.deploy_h2o-ai) && var.existing_vm_subnet_id == "" ? 1 : 0) + name = local.vm_subnet_name + resource_group_name = var.resource_group_name + virtual_network_name = azurerm_virtual_network.vnet[0].name + address_prefixes = [var.vm_subnet_cidr] + enforce_private_link_endpoint_network_policies = true +} + +locals { + vm_subnet_id = (var.existing_vm_subnet_id == "" && ((var.is_vnet_isolated) || var.deploy_selfhostedsql || var.deploy_h2o-ai) ? azurerm_subnet.vm_subnet[0].id : var.existing_vm_subnet_id) +} + + +resource "azurerm_subnet" "app_service_subnet" { + count = (var.is_vnet_isolated && var.deploy_app_service_plan && var.existing_app_service_subnet_id == "" ? 1 : 0) + name = local.app_service_subnet_name + resource_group_name = var.resource_group_name + virtual_network_name = azurerm_virtual_network.vnet[0].name + address_prefixes = [var.app_service_subnet_cidr] + enforce_private_link_endpoint_network_policies = false + + + # required for VNet integration with app services (functions) + # https://docs.microsoft.com/en-us/azure/app-service/web-sites-integrate-with-vnet#regional-vnet-integration + delegation { + name = "app-service-delegation" + + service_delegation { + name = "Microsoft.Web/serverFarms" + actions = ["Microsoft.Network/virtualNetworks/subnets/action"] + } + } +} +locals { + app_service_subnet_id = (var.existing_app_service_subnet_id == "" && (var.is_vnet_isolated) ? 
azurerm_subnet.app_service_subnet[0].id : var.existing_app_service_subnet_id) +} diff --git a/solution/DeploymentV2/terraform_layer0/vars.tf b/solution/DeploymentV2/terraform_layer0/vars.tf new file mode 100644 index 00000000..17d9cfbf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/vars.tf @@ -0,0 +1,896 @@ +#--------------------------------------------------------------- +# Provider details +#--------------------------------------------------------------- +variable "ip_address" { + description = "The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets" + type = string + default = "" +} + +variable "ip_address2" { + description = "The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets" + type = string + default = "" +} + +variable "tenant_id" { + description = "The AAD tenant ID" + type = string +} + +variable "subscription_id" { + description = "The Azure Subscription ID" + type = string +} + +variable "resource_location" { + description = "The Azure Region being deployed to." + type = string + default = "Australia East" +} + +variable "resource_group_name" { + type = string +} +#--------------------------------------------------------------- +# Tags +#--------------------------------------------------------------- + +variable "owner_tag" { + description = "The tags to apply to resources." + type = string + default = "opensource.microsoft.com" +} + +variable "author_tag" { + description = "The tags to apply to resources." + type = string + default = "opensource.microsoft.com" +} + +variable "environment_tag" { + description = "The name of the environment. 
Don't use spaces" + default = "dev" + type = string +} + +#--------------------------------------------------------------- +# Configuration +#--------------------------------------------------------------- +variable "domain" { + description = "The AAD domain" + type = string +} +variable "cicd_sp_id" { + description = "The Object Id of the GitHub Service Principal. This will ensure that keyvault access policies are configured for GitHub/terraform to read secret state later" + type = string + default = "" +} +//Onprem linked services and pipelines won't be registered until you complete the IR registration and set this to true +variable "is_onprem_datafactory_ir_registered" { + description = "Are all on-premise Integration runtimes configured?" + default = false + type = bool +} + +variable "is_vnet_isolated" { + description = "Whether to deploy the resources as vnet attached / private linked" + default = true + type = bool +} + +variable "sql_admin_username" { + description = "The username for the sql server admin" + default = "adsgofastsqladminuser11" + type = string +} + +variable "jumphost_password" { + description = "Password for the jumphost" + type = string +} + +variable "synapse_sql_login" { + description = "Login for the Azure Synapse SQL admin" + default = "adsgofastsynapseadminuser14" + type = string +} + +variable "synapse_sql_password" { + description = "Password for the Azure Synapse SQL admin" + type = string +} + +variable "allow_public_access_to_synapse_studio" { + description = "Should the synapse studio allow access to public IPs" + type = bool + default = false +} + +variable "vnet_cidr" { + description = "CIDR of the vnet" + type = string + default = "10.0.0.0/24" +} +variable "plink_subnet_cidr" { + description = "CIDR of the subnet used for private link endpoints" + type = string + default = "10.0.0.0/26" +} +variable "bastion_subnet_cidr" { + description = "CIDR of the subnet used for bastion" + type = string + default = "10.0.0.64/26" +} 
+variable "app_service_subnet_cidr" { + description = "CIDR of the subnet used to host the app service plan" + type = string + default = "10.0.0.128/26" +} + +variable "vm_subnet_cidr" { + description = "CIDR of the subnet used to host VM compute resources" + type = string + default = "10.0.0.192/26" +} + +# This is used when deploying from outside the Vnet (running locally or with GitHub Hosted runners) +# When set to true. Resources will be created with public_access set to true and then a script +# will be executed at the end to set them back. +variable "delay_private_access" { + description = "Whether to create resoruces with public access enabled and then disable it at the end." + type = bool + default = true +} + + +#--------------------------------------------------------------- +# Feature Toggles +#--------------------------------------------------------------- +variable "deploy_data_factory" { + description = "Feature toggle for deploying the Azure Data Factory" + default = true + type = bool +} +variable "deploy_app_insights" { + description = "Feature toggle for deploying the App Insights" + default = true + type = bool +} +variable "deploy_bastion" { + description = "Feature toggle for deploying bastion" + default = true + type = bool +} +variable "deploy_app_service_plan" { + description = "Feature toggle for deploying the App Service" + default = true + type = bool +} +variable "deploy_web_app" { + description = "Feature toggle for deploying the Web App" + default = true + type = bool +} +variable "deploy_function_app" { + description = "Feature toggle for deploying the Function App" + default = true + type = bool +} +variable "deploy_sql_server" { + description = "Feature toggle for deploying the SQL Server" + default = true + type = bool +} + +variable "deploy_metadata_database" { + description = "Feature toggle for deploying Metadata Database" + default = true + type = bool +} + +variable "deploy_sql_extend_audit_policy" { + description = "Feature 
toggle for deploying the SQL Server Extended Audit policy" + default = true + type = bool +} +variable "deploy_azure_ad_web_app_registration" { + description = "Feature toggle for deploying the Azure AD App registration for the Web Portal" + default = true + type = bool +} +variable "deploy_azure_ad_function_app_registration" { + description = "Feature toggle for deploying the Azure AD App registration for the Function App" + default = true + type = bool +} +variable "deploy_azure_role_assignments" { + description = "Feature toggle for deploying the Azure Role Assignments" + default = true + type = bool +} +variable "deploy_storage_account" { + description = "Feature toggle for deploying the internal storage account" + default = true + type = bool +} +variable "deploy_adls" { + description = "Feature toggle for deploying the internal data lake" + default = true + type = bool +} +variable "deploy_purview" { + description = "Feature toggle for deploying Azure Purview" + default = false + type = bool +} +variable "deploy_sentinel" { + description = "Feature toggle for deploying Azure Sentinel" + default = false + type = bool +} +variable "deploy_synapse" { + description = "Feature toggle for deploying Azure Synapse" + default = false + type = bool +} + +variable "deploy_synapse_sqlpool" { + description = "Feature toggle for deploying Azure Synapse SQL Pool" + default = true + type = bool +} + +variable "deploy_synapse_sparkpool" { + description = "Feature toggle for deploying Azure Synapse Spark Pool" + default = true + type = bool +} + +variable "deploy_selfhostedsql" { + description = "Feature toggle for deploying Self Hosted Sql VM" + default = false + type = bool +} + +variable "deploy_h2o-ai" { + description = "Feature toggle for deploying H2O-AI VM" + default = false + type = bool +} +variable "deploy_custom_vm" { + description = "Feature toggle for deploying a custom virtual machine" + default = false + type = bool +} +variable "custom_vm_os" { + description = 
"User must define whether they wish deploy a 'windows' or 'linux' virtual machine." + default = "linux" + type = string +} +variable "synapse_git_toggle_integration" { + description = "Feature toggle for enabling synapse github integration" + default = false + type = bool +} +variable "synapse_git_integration_type" { + description = "User must define whether they wish to use 'github' integration or 'devops'" + default = "github" + type = string +} + +variable "synapse_git_use_pat" { + description = "Whether a pat is required for authentication (non public repo)." + default = true + type = bool +} + +variable "adf_git_toggle_integration" { + description = "Feature toggle for enabling adf github integration" + default = false + type = bool +} + +variable "adf_git_use_pat" { + description = "Whether a pat is required for authentication (non public repo)." + default = true + type = bool +} +variable "deploy_custom_terraform" { + description = "Whether the platform deploys the infrastructure located in the terraform_custom folder" + default = false + type = bool +} +#--------------------------------------------------------------- +# Post IAC - Feature Toggles +#--------------------------------------------------------------- +variable "publish_web_app" { + description = "Feature toggle for Publishing Web Application Code Base" + default = true + type = bool +} + +variable "publish_function_app" { + description = "Feature toggle for Publishing Function Application Code Base" + default = true + type = bool +} + +variable "publish_sample_files" { + description = "Feature toggle for Publishing Sample Filess" + default = true + type = bool +} + +variable "publish_metadata_database" { + description = "Feature toggle for Publishing Metadata Database schema and seeding with data" + default = true + type = bool +} +variable "publish_sql_logins" { + description = "Feature toggle for Publishing Synapse / SQL database logins for lockbox" + default = true + type = bool +} +variable 
"publish_functional_tests" { + description = "Feature toggle for Publishing Functional Tests to the Web App" + default = false + type = bool +} + +variable "publish_purview_configuration" { + description = "Feature toggle for deploying the Purview configuration script (WIP)" + default = false + type = bool +} +variable "configure_networking" { + description = "Feature toggle for post IAC network configuration" + default = true + type = bool +} + +variable "publish_datafactory_pipelines" { + description = "Feature toggle for post IAC data factory pipeline deployment" + default = true + type = bool +} + + +variable "publish_sif_database" { + description = "Feature toggle for Publishing SIF Database" + default = false + type = bool +} + +variable "sif_database_name" { + description = "SIF DataMart Name" + default = "sif" + type = string +} + +#--------------------------------------------------------------- +# Naming Prefix Settings +#--------------------------------------------------------------- +variable "prefix" { + description = "The prefix value to be used for autogenerated naming conventions" + default = "ark" + type = string +} +variable "app_name" { + description = "The app_name suffix value to be used for autogenerated naming conventions" + default = "ads" + type = string +} + +#--------------------------------------------------------------- +# Override individual resource names +#--------------------------------------------------------------- +variable "key_vault_name" { + description = "The override name for the keyvault resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "app_service_plan_name" { + description = "The override name for the app service plan resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "app_insights_name" { + description = "The override name for the app insights resource. 
If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "sql_server_name" { + description = "The override name for the sql server resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "webapp_name" { + description = "The override name for the web app service. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "functionapp_name" { + description = "The override name for the function app service resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} + +variable "aad_webapp_name" { + description = "The override name for the AAD App registration for the web app. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "aad_functionapp_name" { + description = "The override name for the AAD App registration for the function app. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "vnet_name" { + description = "The override name for the Virtual Network resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "plink_subnet_name" { + description = "The override name for the private link subnet resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "app_service_subnet_name" { + description = "The override name for the app service subnet resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "vm_subnet_name" { + description = "The override name for the vm subnet resource. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} + +variable "app_service_nsg_name" { + description = "The override name for the app service subnet NSG. 
If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "plink_nsg_name" { + description = "The override name for the private link subnet NSG. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "vm_nsg_name" { + description = "The override name for the VM subnet NSG. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "bastion_nsg_name" { + description = "The override name for the bastion subnet NSG. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "log_analytics_workspace_name" { + description = "The override name for the Log Analytics workspace. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "logs_storage_account_name" { + description = "The override name for the storage account used for logs. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "adls_storage_account_name" { + description = "The override name for the storage account used for adls. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "blob_storage_account_name" { + description = "The override name for the storage account used for staging data. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "bastion_name" { + description = "The override name for the Bastion service. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "bastion_ip_name" { + description = "The override name for the Bastion service Public IP. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "data_factory_name" { + description = "The override name for the Data Factory component. 
If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "purview_name" { + description = "The override name for the Purview component. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "purview_ir_app_reg_name" { + description = "The override name for the Purview Integration runtime SP. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "purview_resource_location" { + description = "The override location for the Purview component. If empty, will be autogenerated based global location settings" + default = "" + type = string +} + +variable "synapse_data_lake_name" { + description = "The override name for the Synapse data lake component. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "synapse_workspace_name" { + description = "The override name for the Synapse workspace component. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "synapse_dwpool_name" { + description = "The override name for the Synapse Dedicated Pool component. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} +variable "synapse_sppool_name" { + description = "The override name for the Synapse spark pool component. If empty, will be autogenerated based on prefix settings" + default = "" + type = string +} + +variable "synapse_git_repository_owner" { + description = "The owner of the github repository to be used for synapse. Eg. 
for the repository https://github.com/contoso/ads, the owner is contoso" + default = "" + type = string +} + +variable "synapse_git_repository_name" { + description = "The name of the github repository to be used for synapse" + default = "" + type = string +} +/*NOT CURRENLTY USED +variable "synapse_git_repository_base_url" { + description = "The base URL of the git repository you are using for synapse E.g - https://github.com/microsoft/azure-data-services-go-fast-codebase / https://dev.azure.com/microsoft/_git/lockBoxProject" + default = "" + type = string +}*/ +variable "synapse_git_repository_branch_name" { + description = "The name of the github branch to be used" + default = "main" + type = string +} + +variable "synapse_git_repository_root_folder" { + description = "The name of the root folder to be used in the branch" + default = "/" + type = string +} +variable "synapse_git_github_host_url" { + description = "Specifies the GitHub Enterprise host name. For example: https://github.mydomain.com. Use https://github.com for open source repositories. Note: Not used for devops" + default = "https://github.com" + type = string +} +variable "synapse_git_devops_project_name" { + description = "The name of the project to be referenced within devops. Note: Not used for github." + default = "/" + type = string +} + +variable "synapse_git_devops_tenant_id" { + description = "The tenant id of the devops project. By default it will be valued as your tenant_id. Note: Not used for github." + default = "" + type = string +} + +variable "synapse_git_pat" { + description = "The personal access token used to authenticate the git account" + default = "" + type = string +} +variable "synapse_git_user_name" { + description = "The user name to be associated with the commit being done for the pipeline upload." 
+ default = "" + type = string +} + +variable "synapse_git_email_address" { + description = "The email address to be associated with the commit being done for the pipeline upload." + default = "" + type = string +} +variable "adf_git_repository_owner" { + description = "The owner of the github repository to be used for adf. Eg. for the repository https://github.com/contoso/ads, the owner is contoso" + default = "" + type = string +} + +variable "adf_git_repository_name" { + description = "The name of the github repository to be used for synapse" + default = "" + type = string +} +variable "adf_git_repository_branch_name" { + description = "The name of the github branch to be used" + default = "main" + type = string +} + +variable "adf_git_repository_root_folder" { + description = "The name of the root folder to be used in the branch" + default = "/" + type = string +} + +variable "adf_git_host_url" { + description = "Specifies the GitHub Enterprise host name. For example: https://github.mydomain.com. Use https://github.com for open source repositories." + default = "https://github.com" + type = string +} + +variable "adf_git_pat" { + description = "The personal access token used to authenticate the git account" + default = "" + type = string +} +variable "adf_git_user_name" { + description = "The user name to be associated with the commit being done for the pipeline upload." + default = "" + type = string +} + +variable "adf_git_email_address" { + description = "The email address to be associated with the commit being done for the pipeline upload." 
+ default = "" + type = string +} +#--------------------------------------------------------------- +# Scale settings +#--------------------------------------------------------------- +variable "app_service_sku" { + description = "The sku/scale of the app service" + type = object({ + tier = string + size = string + capacity = number }) + default = { + tier = "Standard" + size = "S1" + capacity = 1 + } +} + +variable "synapse_sku" { + description = "The sku/scale of the Synapse SQL Pool" + default = "DW100c" + type = string + validation { + condition = contains(["DW100c", "DW200c", "DW300c", "DW400c", "DW500c", "DW1000c", "DW1500c", "DW2000c", "DW2500c", "DW3000c"], var.synapse_sku) + error_message = "Invalid values for var: synapse_sku." + } +} + + +variable "synapse_spark_min_node_count" { + description = "The minimum number of spark nodes in the autoscale pool" + default = 3 + type = number +} + +variable "synapse_spark_max_node_count" { + description = "The maximum number of spark nodes in the autoscale pool" + default = 12 + type = number +} + + + + +#--------------------------------------------------------------- +# Parameters for specifying existing resources for reuse/ +#--------------------------------------------------------------- +variable "existing_log_analytics_workspace_id" { + description = "An existing log analytics workspace id for reuse" + default = "" + type = string +} +variable "existing_log_analytics_resource_id" { + description = "An existing log analytics resource id for reuse" + default = "" + type = string +} +variable "existing_plink_subnet_id" { + description = "An existing subnet id for reuse for the Private link resources" + default = "" + type = string +} + + +variable "existing_bastion_subnet_id" { + description = "An existing subnet id for reuse for the Bastion host" + default = "" + type = string +} +variable "existing_app_service_subnet_id" { + description = "An existing subnet id for reuse for the App Service delegation" + 
default = "" + type = string +} +variable "existing_vm_subnet_id" { + description = "An existing subnet id for reuse for the Agent VMs" + default = "" + type = string +} + +variable "existing_private_dns_zone_db_id" { + description = "An existing private DNS zone for privatelink.database.windows.net" + default = "" + type = string +} + +variable "existing_private_dns_zone_kv_id" { + description = "An existing private DNS zone for privatelink.vaultcore.azure.net" + default = "" + type = string +} + +variable "existing_private_dns_zone_blob_id" { + description = "An existing private DNS zone for privatelink.blob.core.azure.net" + default = "" + type = string +} + +variable "existing_private_dns_zone_queue_id" { + description = "An existing private DNS zone for privatelink.queue.core.azure.net" + default = "" + type = string +} + +variable "existing_private_dns_zone_dfs_id" { + description = "An existing private DNS zone for privatelink.dfs.core.azure.net" + default = "" + type = string +} + +variable "existing_private_dns_zone_purview_id" { + description = "An existing private DNS zone for privatelink.purview.azure.net" + default = "" + type = string +} + +variable "existing_private_dns_zone_purview_studio_id" { + description = "An existing private DNS zone for privatelink.purviewstudio.azure.net" + default = "" + type = string +} + +variable "existing_private_dns_zone_servicebus_id" { + description = "An existing private DNS zone for privatelink.servicebus.windows.net" + default = "" + type = string +} + +variable "existing_private_dns_zone_synapse_gateway_id" { + description = "An existing private DNS zone for privatelink.azuresynapse.net" + default = "" + type = string +} + +variable "existing_private_dns_zone_synapse_studio_id" { + description = "An existing private DNS zone for privatelink.dev.azuresynapse.net" + default = "" + type = string +} + +variable "existing_private_dns_zone_synapse_sql_id" { + description = "An existing private DNS zone for 
privatelink.sql.azuresynapse.net" + default = "" + type = string +} + +variable "existing_synapse_private_link_hub_id" { + description = "An existing private link hub for synapse studio." + default = "" + type = string +} + +variable "custom_vm_plan_name" { + description = "An Azure vm plan name to be referenced for a custom vm image." + default = "" + type = string +} +variable "custom_vm_plan_product" { + description = "An Azure vm plan product to be referenced for a custom vm image." + default = "" + type = string +} +variable "custom_vm_plan_publisher" { + description = "An Azure vm plan publisher to be referenced for a custom vm image." + default = "" + type = string +} +variable "custom_vm_image_offer" { + description = "An Azure custom image marketplace image offer to be referenced for a custom vm image." + default = "" + type = string +} +variable "custom_vm_image_publisher" { + description = "An Azure custom image marketplace image publisher to be referenced for a custom vm image." + default = "" + type = string +} +variable "custom_vm_image_sku" { + description = "An Azure custom image marketplace image sku to be referenced for a custom vm image." + default = "" + type = string +} +variable "custom_vm_image_version" { + description = "An Azure custom image marketplace image version to be referenced for a custom vm image." + default = "latest" + type = string +} + + + + +#--------------------------------------------------------------- +# User Access and Ownership/ +#--------------------------------------------------------------- + +variable "deployment_principal_layers1and3" { + description = "Object Id of the azure account that will deploy layers 1 & 3. If it is the same as the layer 2 user then leave as empty string." 
+ default = "" + type = string +} + +variable "azure_sql_aad_administrators" { + description = "List of Azure SQL Administrators" + type = map(string) + default = {} +} + +variable "azure_purview_data_curators" { + description = "List of Azure Purview Data Curators for default root" + type = map(string) + default = {} +} + +variable "synapse_administrators" { + description = "List of Synapse Administrators" + type = map(string) + default = {} +} + +variable "synapse_contributors" { + description = "List of Synapse Contributors" + type = map(string) + default = {} +} + +variable "synapse_publishers" { + description = "List of Synapse Publishers" + type = map(string) + default = {} +} + +variable "publish_web_app_addcurrentuserasadmin" { + description = "Feature toggle for adding user running deployment as a webapp admin" + default = false + type = bool +} + + +variable "web_app_admin_security_group" { + description = "A web app Azure security group used for admin access." + default = "" + type = string +} + + +variable "resource_owners" { + description = "List of object ids (users or service principals) that will be granted owner access on deployed resources." + default = [] + type = list(string) +} + diff --git a/solution/DeploymentV2/terraform_layer0/vars/admz/terragrunt.hcl b/solution/DeploymentV2/terraform_layer0/vars/admz/terragrunt.hcl new file mode 100644 index 00000000..dd5ec60b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/vars/admz/terragrunt.hcl @@ -0,0 +1,83 @@ +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage account here. + # You should have created these with the Prepare.ps1 script. 
+ resource_group_name = "dlzdev08lite" + storage_account_name = "teststatedev08litestate" + container_name = "tstate" + key = "terraform.tfstate" + } +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "ark" # All azure resources will be prefixed with this + domain = "arkahna.io" # Used when configuring AAD config for Azure functions + tenant_id = "0fee3d31-b963-4a1c-8f4a-ca367205aa65" # This is the Azure AD tenant ID + subscription_id = "14f299e1-be54-43e9-bf5e-696840f86fc4" # The azure subscription id to deploy to + resource_location = "Australia East" # The location of the resources + resource_group_name = "dlzdev08lite" # The resource group all resources will be deployed to + owner_tag = "Arkahna" # Owner tag value for Azure resources + environment_tag = "prod" # This is used on Azure tags as well as all resource names + ip_address = "101.179.193.89" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. 
+ configure_networking = false + is_vnet_isolated = true + deploy_web_app = false + deploy_function_app = false + publish_web_app = false + publish_function_app = false + publish_sample_files = true + publish_metadata_database = false + publish_datafactory_pipelines = true + publish_web_app_addcurrentuserasadmin = true + deploy_bastion = false + deploy_sentinel = false + deploy_purview = false + deploy_synapse = true + deploy_app_service_plan = false + deploy_synapse_sqlpool = false + deploy_selfhostedsql = false + deploy_h2o-ai = false + deploy_metadata_database = false + is_onprem_datafactory_ir_registered = false + + #Below is a space for providing details of EXISTING resources so that the deployment can integrate with your management zone. + #Please ensure that you enter everything that is relevant otherwise deployed resources may not work properly. + #log anayltics resource id can be found under the properties of the log analytics resource NOTE: This is the full URI not the workspaceID + #workspace id can be found under the main page, it is a guid + existing_log_analytics_resource_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.OperationalInsights/workspaces/ark-stg-log-ads-g4js" + existing_log_analytics_workspace_id = "23bfd865-7b4e-494c-8538-a872f54c3893" + #synapse private link hub id can be found under the properties of the synapse private link NOTE: This is the full URI (ResourceID) + existing_synapse_private_link_hub_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Synapse/privateLinkHubs/arkstgsynwadsg4jsplink" + #Please assign subnet id's for the following - you may end up using the same subnet id for all of these resources depending on your already deployed assets. 
+ #command used to get subnet ids: + # az network vnet subnet show -g MyResourceGroup -n MySubnet --vnet-name MyVNet + existing_plink_subnet_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/virtualNetworks/ark-stg-vnet-ads/subnets/ark-stg-snet-ads-plink" + existing_bastion_subnet_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/virtualNetworks/ark-stg-vnet-ads/subnets/AzureBastionSubnet" + existing_app_service_subnet_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/virtualNetworks/ark-stg-vnet-ads/subnets/ark-stg-snet-ads-appservice" + existing_vm_subnet_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/virtualNetworks/ark-stg-vnet-ads/subnets/ark-stg-snet-ads-vm" + + #assign the private DNS zone id's for the following. + #command used to get existing private-dns zones: + #az network private-dns zone list -g MyResourceGroup + existing_private_dns_zone_db_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.database.windows.net" + existing_private_dns_zone_kv_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.vaultcore.azure.net" + existing_private_dns_zone_blob_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.blob.core.windows.net" + existing_private_dns_zone_queue_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.queue.core.windows.net" + existing_private_dns_zone_dfs_id = 
"/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.dfs.core.windows.net" + existing_private_dns_zone_purview_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.purview.azure.com" + existing_private_dns_zone_purview_studio_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.purviewstudio.azure.com" + existing_private_dns_zone_servicebus_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.servicebus.windows.net" + existing_private_dns_zone_synapse_gateway_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.azuresynapse.net" + existing_private_dns_zone_synapse_studio_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.dev.azuresynapse.net" + existing_private_dns_zone_synapse_sql_id = "/subscriptions/14f299e1-be54-43e9-bf5e-696840f86fc4/resourceGroups/dlzdev08lite/providers/Microsoft.Network/privateDnsZones/privatelink.sql.azuresynapse.net" + +} diff --git a/solution/DeploymentV2/terraform_layer0/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer0/vars/local/terragrunt.hcl new file mode 100644 index 00000000..0f9e5d47 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/vars/local/terragrunt.hcl @@ -0,0 +1,45 @@ +locals { + common_vars = jsondecode(file("../../../bin/environments/local/common_vars_for_hcl.json")) +} + + + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using 
TF_VAR_variablename +inputs = { + prefix = "${local.common_vars.prefix}" # All azure resources will be prefixed with this + domain = "${local.common_vars.domain}" # Used when configuring AAD config for Azure functions + tenant_id = "${local.common_vars.tenant_id}" # This is the Azure AD tenant ID + subscription_id = "${local.common_vars.subscription_id}" # The azure subscription id to deploy to + resource_location = "${local.common_vars.resource_location}" # The location of the resources + resource_group_name = "${local.common_vars.resource_group_name}" # The resource group all resources will be deployed to + owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources + environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names + ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + azure_sql_aad_administrators = "${local.common_vars.azure_sql_aad_administrators}" + azure_purview_data_curators = "${local.common_vars.azure_purview_data_curators}" + synapse_administrators = "${local.common_vars.synapse_administrators}" + resource_owners = "${local.common_vars.resource_owners}" + deploy_web_app = true + deploy_function_app = true + deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. 
+ deploy_app_service_plan = true + deploy_data_factory = true + deploy_sentinel = true + deploy_purview = false + deploy_synapse = true + deploy_metadata_database = true + is_vnet_isolated = false + publish_web_app = true + publish_function_app = true + publish_sample_files = true + publish_metadata_database = true + configure_networking = false + publish_datafactory_pipelines = true + publish_web_app_addcurrentuserasadmin = true + deploy_selfhostedsql = false + is_onprem_datafactory_ir_registered = false + publish_sif_database = true + deployment_principal_layers1and3 = "${local.common_vars.deployment_principal_layers1and3}" +} diff --git a/solution/DeploymentV2/terraform_layer0/vars/production/terragrunt.hcl b/solution/DeploymentV2/terraform_layer0/vars/production/terragrunt.hcl new file mode 100644 index 00000000..c7a1a75e --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/vars/production/terragrunt.hcl @@ -0,0 +1,45 @@ +locals { + common_vars = jsondecode(file("../../../bin/environments/production/common_vars_for_hcl.json")) +} + + + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "${local.common_vars.prefix}" # All azure resources will be prefixed with this + domain = "${local.common_vars.domain}" # Used when configuring AAD config for Azure functions + tenant_id = "${local.common_vars.tenant_id}" # This is the Azure AD tenant ID + subscription_id = "${local.common_vars.subscription_id}" # The azure subscription id to deploy to + resource_location = "${local.common_vars.resource_location}" # The location of the resources + resource_group_name = "${local.common_vars.resource_group_name}" # The resource group all resources will be deployed to + owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources + environment_tag = 
"${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names + ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions + azure_sql_aad_administrators = "${local.common_vars.azure_sql_aad_administrators}" + azure_purview_data_curators = "${local.common_vars.azure_purview_data_curators}" + synapse_administrators = "${local.common_vars.synapse_administrators}" + resource_owners = "${local.common_vars.resource_owners}" + deploy_web_app = true + deploy_function_app = true + deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. + deploy_app_service_plan = true + deploy_data_factory = true + deploy_sentinel = true + deploy_purview = false + deploy_synapse = true + deploy_metadata_database = true + is_vnet_isolated = false + publish_web_app = true + publish_function_app = true + publish_sample_files = true + publish_metadata_database = true + configure_networking = false + publish_datafactory_pipelines = true + publish_web_app_addcurrentuserasadmin = true + deploy_selfhostedsql = false + is_onprem_datafactory_ir_registered = false + publish_sif_database = true + deployment_principal_layers1and3 = "${local.common_vars.deployment_principal_layers1and3}" +} diff --git a/solution/DeploymentV2/terraform_layer0/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer0/vars/staging/terragrunt.hcl new file mode 100644 index 00000000..57f8fe58 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/vars/staging/terragrunt.hcl @@ -0,0 +1,43 @@ +locals { + common_vars = jsondecode(file("../../../bin/environments/staging/common_vars_for_hcl.json")) +} + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = 
"${local.common_vars.prefix}" # All azure resources will be prefixed with this + domain = "${local.common_vars.domain}" # Used when configuring AAD config for Azure functions + tenant_id = "${local.common_vars.tenant_id}" # This is the Azure AD tenant ID + subscription_id = "${local.common_vars.subscription_id}" # The azure subscription id to deploy to + resource_location = "${local.common_vars.resource_location}" # The location of the resources + resource_group_name = "${local.common_vars.resource_group_name}" # The resource group all resources will be deployed to + owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources + environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names + ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + azure_sql_aad_administrators = "${local.common_vars.azure_sql_aad_administrators}" + azure_purview_data_curators = "${local.common_vars.azure_purview_data_curators}" + synapse_administrators = "${local.common_vars.synapse_administrators}" + resource_owners = "${local.common_vars.resource_owners}" + deploy_web_app = true + deploy_function_app = true + deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. 
+ deploy_app_service_plan = true + deploy_data_factory = true + deploy_sentinel = true + deploy_purview = false + deploy_synapse = true + deploy_metadata_database = true + is_vnet_isolated = false + publish_web_app = true + publish_function_app = true + publish_sample_files = true + publish_metadata_database = true + configure_networking = false + publish_datafactory_pipelines = true + publish_web_app_addcurrentuserasadmin = true + deploy_selfhostedsql = false + is_onprem_datafactory_ir_registered = false + publish_sif_database = true + deployment_principal_layers1and3 = "${local.common_vars.deployment_principal_layers1and3}" +} diff --git a/solution/DeploymentV2/terraform_layer0/vars/uat/terragrunt.hcl b/solution/DeploymentV2/terraform_layer0/vars/uat/terragrunt.hcl new file mode 100644 index 00000000..d5dd4e7b --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/vars/uat/terragrunt.hcl @@ -0,0 +1,44 @@ +locals { + common_vars = jsondecode(file("../../../bin/environments/uat/common_vars_for_hcl.json")) +} + + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "${local.common_vars.prefix}" # All azure resources will be prefixed with this + domain = "${local.common_vars.domain}" # Used when configuring AAD config for Azure functions + tenant_id = "${local.common_vars.tenant_id}" # This is the Azure AD tenant ID + subscription_id = "${local.common_vars.subscription_id}" # The azure subscription id to deploy to + resource_location = "${local.common_vars.resource_location}" # The location of the resources + resource_group_name = "${local.common_vars.resource_group_name}" # The resource group all resources will be deployed to + owner_tag = "${local.common_vars.owner_tag}" # Owner tag value for Azure resources + environment_tag = "${local.common_vars.environment_tag}" # This is used on 
Azure tags as well as all resource names + ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + azure_sql_aad_administrators = "${local.common_vars.azure_sql_aad_administrators}" + azure_purview_data_curators = "${local.common_vars.azure_purview_data_curators}" + synapse_administrators = "${local.common_vars.synapse_administrators}" + resource_owners = "${local.common_vars.resource_owners}" + deploy_web_app = true + deploy_function_app = true + deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. + deploy_app_service_plan = true + deploy_data_factory = true + deploy_sentinel = true + deploy_purview = false + deploy_synapse = true + deploy_metadata_database = true + is_vnet_isolated = false + publish_web_app = true + publish_function_app = true + publish_sample_files = true + publish_metadata_database = true + configure_networking = false + publish_datafactory_pipelines = true + publish_web_app_addcurrentuserasadmin = true + deploy_selfhostedsql = false + is_onprem_datafactory_ir_registered = false + publish_sif_database = true + deployment_principal_layers1and3 = "${local.common_vars.deployment_principal_layers1and3}" +} diff --git a/solution/DeploymentV2/terraform_layer0/virtual_machines.tf b/solution/DeploymentV2/terraform_layer0/virtual_machines.tf new file mode 100644 index 00000000..5b2306f3 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/virtual_machines.tf @@ -0,0 +1,54 @@ +resource "azurerm_network_interface" "cicd_vm_nic" { + count = var.is_vnet_isolated ? 
1 : 0 + name = local.jumphost_nic_name + location = var.resource_location + resource_group_name = var.resource_group_name + + ip_configuration { + name = "internal" + subnet_id = local.vm_subnet_id + private_ip_address_allocation = "Dynamic" + } +} + + +resource "azurerm_virtual_machine" "cicd_vm_linux" { + name = "CICDAgent" + location = var.resource_location + resource_group_name = var.resource_group_name + vm_size = "Standard_D2s_v3" + network_interface_ids = [ + azurerm_network_interface.cicd_vm_nic[0].id, + ] + + os_profile { + computer_name = "CICDAgent" + admin_username = "adminuser" + admin_password = var.jumphost_password + } + os_profile_linux_config { + disable_password_authentication = false + } + + storage_image_reference { + publisher = "Canonical" + offer = "0001-com-ubuntu-server-jammy" + sku = "22_04-lts-gen2" + version = "latest" + } + + storage_os_disk { + name = "cicd_vm_linux_disk" + caching = "ReadWrite" + create_option = "FromImage" + managed_disk_type = "Premium_LRS" + disk_size_gb = "30" + } + + lifecycle { + ignore_changes = [ + os_profile + ] + } +} + diff --git a/solution/DeploymentV2/terraform_layer0/vnet.tf b/solution/DeploymentV2/terraform_layer0/vnet.tf new file mode 100644 index 00000000..080a25f1 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/vnet.tf @@ -0,0 +1,15 @@ + +resource "azurerm_virtual_network" "vnet" { + count = (var.is_vnet_isolated || var.deploy_selfhostedsql || var.deploy_h2o-ai ? 
1 : 0) + name = local.vnet_name + location = var.resource_location + resource_group_name = var.resource_group_name + address_space = [var.vnet_cidr] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} From 20224f0a71276d01a0f1d79adf6a08008915aba3 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Wed, 17 Aug 2022 18:57:45 +0800 Subject: [PATCH 134/151] State Storage Swapped from native tf due to https://github.com/hashicorp/terraform-provider-azurerm/issues/16335 --- .gitignore | 1 + .../vars/common_vars_template.jsonnet | 14 +++ .../terraform_layer0/.terraform.lock.hcl | 19 ++++ .../terraform_layer0/00-deploy.ps1 | 7 +- .../DeploymentV2/terraform_layer0/locals.tf | 1 - .../DeploymentV2/terraform_layer0/main.tf | 8 ++ .../DeploymentV2/terraform_layer0/outputs.tf | 3 + .../terraform_layer0/storage_adls_state.tf | 102 ++++++++++++++++++ .../DeploymentV2/terraform_layer0/vars.tf | 5 - .../utilities/GitHubRunnerInstall.sh | 10 +- 10 files changed, 157 insertions(+), 13 deletions(-) create mode 100644 solution/DeploymentV2/terraform_layer0/storage_adls_state.tf diff --git a/.gitignore b/.gitignore index 97bbf98e..e1c4e241 100644 --- a/.gitignore +++ b/.gitignore @@ -294,3 +294,4 @@ solution/FunctionApp/Patterns/pipeline/*/output/ #test folders solution/DataFactory/Patterns/pipeline/*/functionapptests/tests/tests.json solution/Synapse/Patterns/pipeline/*/functionapptests/tests/tests.json +solution/DeploymentV2/terraform_layer0/.terraform.lock.hcl diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 9b615196..2bf01188 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -643,6 +643,20 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/terraform_layer0/.terraform.lock.hcl b/solution/DeploymentV2/terraform_layer0/.terraform.lock.hcl index 8ca5626b..049c2da1 100644 --- a/solution/DeploymentV2/terraform_layer0/.terraform.lock.hcl +++ b/solution/DeploymentV2/terraform_layer0/.terraform.lock.hcl @@ -1,6 +1,25 @@ # This file is maintained automatically by "terraform init". # Manual edits may be lost in future updates. +provider "registry.terraform.io/azure/azapi" { + version = "0.5.0" + hashes = [ + "h1:0mmLkpn3qO9lEO9BGYpwifqJ7KLmNnrJL/5+EAn0sS4=", + "zh:1bbd3e887b13085aa1d989f11e3fc7c8cf0d81cc8dcfeb58f7f752478f061001", + "zh:2c52b4f0bd0e96d8f60c878947a63ae8ea08a735968d8603769f0da4654954b3", + "zh:32837d15d002721c20f4561a1b2d44438b066f9812801295d864e0b4c93b6297", + "zh:3e9fbb1137a36d782df4935acbf89e88f14e6672934ac03fd0226e5ddd4430e4", + "zh:4ce3d703c596bc998f4392a9a177b75ee4d48181c09512fee67ef38aff7dd945", + "zh:53ec2fcdb866b763cc05f8dbd6caf9320d86381c6083d69b4d838dc9bc8128cf", + "zh:5aafa486486ec52d4ca621b490016fa7c022b3f5e3ac16b1dd3b01b98cd86a1c", + "zh:83817bb7dc503254e93972464c496d0f9f4fc7499d294b74c7df9105100c41e7", + "zh:9bce71b22b4700c625f8bd2dddd72e01470c410675fa5c2b014c71acfd7ecb3c", + "zh:d7fc35384b21c0209575c74ae057061048ab342d0bf8d923ed44d83bd6670e42", + "zh:df8e045e6fd72d94fa18c082989d69cd2c73d3b4ade008362ddb90dfcfa91ac7", + "zh:f245a74fc9eca9b63ff0d6856da9d71ccab951b299d45af54e3d3b47bc5ef85f", + ] +} + provider "registry.terraform.io/hashicorp/azuread" { version = "2.22.0" constraints = "2.22.0" diff --git a/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 b/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 index 6136da87..e42197d1 100644 --- a/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 @@ -44,7 +44,10 @@ PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderP # Main Terraform - Layer1 
#------------------------------------------------------------------------------------------------------------ Write-Host "Starting Terraform Deployment- Layer 0" - +if([string]::IsNullOrEmpty($env:TF_VAR_jumphost_password) -and ($gitDeploy -eq $false -or $null -eq $gitdeploy)) +{ + $env:TF_VAR_jumphost_password = Read-Host "Enter the Jumphost Password" +} $output = terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure $output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json #-var synapse_sql_password=$env:TF_VAR_synapse_sql_password @@ -52,6 +55,8 @@ $output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environme ProcessTerraformApply -output $output -gitDeploy $gitDeploy + + #Update Values for variables in Environment #[Environment]::SetEnvironmentVariable("TF_VAR_state_storage_account_name", $Value) $tout_raw = ((az storage blob download -c "tstate" -n "terraform_layer2.tfstate" --account-name $env:TF_VAR_state_storage_account_name --auth-mode login) | ConvertFrom-Json).outputs diff --git a/solution/DeploymentV2/terraform_layer0/locals.tf b/solution/DeploymentV2/terraform_layer0/locals.tf index c2e0c0e9..15a5146a 100644 --- a/solution/DeploymentV2/terraform_layer0/locals.tf +++ b/solution/DeploymentV2/terraform_layer0/locals.tf @@ -40,7 +40,6 @@ locals { synapse_dwpool_name = (var.synapse_dwpool_name != "" ? var.synapse_dwpool_name : "${var.prefix}${var.environment_tag}syndp${var.app_name}") synapse_sppool_name = (var.synapse_sppool_name != "" ? var.synapse_sppool_name : "${var.prefix}${var.environment_tag}synsp${var.app_name}") synapse_resource_group_name = "managed-${module.naming.resource_group.name_unique}-synapse" - synapse_sql_password = ((var.deploy_synapse && var.synapse_sql_password == null) ? 
"" : var.synapse_sql_password) selfhostedsqlvm_name = replace(module.naming.virtual_machine.name,"-vm-ads","-vm-sql") h2o-ai_name = replace(module.naming.virtual_machine.name,"-vm-ads","-vm-h2o") custom_vm_name = replace(module.naming.virtual_machine.name,"-vm-ads","-vm-custom") diff --git a/solution/DeploymentV2/terraform_layer0/main.tf b/solution/DeploymentV2/terraform_layer0/main.tf index 1d5d2bdd..61787890 100644 --- a/solution/DeploymentV2/terraform_layer0/main.tf +++ b/solution/DeploymentV2/terraform_layer0/main.tf @@ -9,6 +9,9 @@ terraform { source = "hashicorp/azuread" version = "=2.22.0" } + azapi = { + source = "Azure/azapi" + } random = { source = "hashicorp/random" version = "=3.3.0" @@ -30,6 +33,11 @@ provider "azuread" { tenant_id = var.tenant_id } + +provider "azapi" { + tenant_id = var.tenant_id +} + data "azurerm_client_config" "current" { } diff --git a/solution/DeploymentV2/terraform_layer0/outputs.tf b/solution/DeploymentV2/terraform_layer0/outputs.tf index e69de29b..241d1621 100644 --- a/solution/DeploymentV2/terraform_layer0/outputs.tf +++ b/solution/DeploymentV2/terraform_layer0/outputs.tf @@ -0,0 +1,3 @@ +output "private_dns_zone_dfs_id" { + value = local.private_dns_zone_dfs_id +} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer0/storage_adls_state.tf b/solution/DeploymentV2/terraform_layer0/storage_adls_state.tf new file mode 100644 index 00000000..efa0cd40 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/storage_adls_state.tf @@ -0,0 +1,102 @@ +locals { + stateaccountname = "gf6state" +} + +#note: Swapped from native tf due to https://github.com/hashicorp/terraform-provider-azurerm/issues/16335 +resource "azapi_resource" "adls_state" { + type = "Microsoft.Storage/storageAccounts@2022-05-01" + name = local.stateaccountname + parent_id = "/subscriptions/${var.subscription_id}/resourceGroups/${var.resource_group_name}" + location = var.resource_location + + body = jsonencode({ + kind = "StorageV2" + 
sku = { + name = "Standard_GRS" + } + properties = { + isHnsEnabled = true, + minimumTlsVersion = "TLS1_2" + publicNetworkAccess = var.is_vnet_isolated ? "Disabled" : "Enabled" + networkAcls = { + defaultAction = "Deny", + bypass = "AzureServices,Metrics" + ipRules = var.is_vnet_isolated ? [{value = var.ip_address, action = "Allow"}] : [] + } + } + }) +} + + + +resource "azurerm_role_assignment" "adls_state_deployment_agents" { + for_each = { + for ro in var.resource_owners : ro => ro + } + scope = azapi_resource.adls_state.id + role_definition_name = "Storage Blob Data Contributor" + principal_id = each.value +} + +resource "azurerm_private_endpoint" "adls_state_storage_private_endpoint_with_dns" { + count = var.deploy_adls && var.is_vnet_isolated ? 1 : 0 + name = "${local.stateaccountname}-blob-plink" + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.stateaccountname}-blob-plink-conn" + private_connection_resource_id = azapi_resource.adls_state.id + is_manual_connection = false + subresource_names = ["blob"] + } + + private_dns_zone_group { + name = "privatednszonegroupstorageblob" + private_dns_zone_ids = [local.private_dns_zone_blob_id] + } + + depends_on = [ + azapi_resource.adls_state + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + +resource "azurerm_private_endpoint" "adls_dfs_state_storage_private_endpoint_with_dns" { + count = var.deploy_adls && var.is_vnet_isolated ? 
1 : 0 + name = "${local.stateaccountname}-dfs-plink" + location = var.resource_location + resource_group_name = var.resource_group_name + subnet_id = local.plink_subnet_id + + private_service_connection { + name = "${local.stateaccountname}-dfs-plink-conn" + private_connection_resource_id = azapi_resource.adls_state.id + is_manual_connection = false + subresource_names = ["dfs"] + } + + private_dns_zone_group { + name = "privatednszonegroupstoragedfs" + private_dns_zone_ids = [local.private_dns_zone_dfs_id] + } + + depends_on = [ + azapi_resource.adls_state + ] + + tags = local.tags + lifecycle { + ignore_changes = [ + tags + ] + } +} + diff --git a/solution/DeploymentV2/terraform_layer0/vars.tf b/solution/DeploymentV2/terraform_layer0/vars.tf index 17d9cfbf..e5b751df 100644 --- a/solution/DeploymentV2/terraform_layer0/vars.tf +++ b/solution/DeploymentV2/terraform_layer0/vars.tf @@ -96,11 +96,6 @@ variable "synapse_sql_login" { type = string } -variable "synapse_sql_password" { - description = "Password for the Azure Synapse SQL admin" - type = string -} - variable "allow_public_access_to_synapse_studio" { description = "Should the synapse studio allow access to public IPs" type = bool diff --git a/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh b/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh index 5abb4b0f..89a637a3 100644 --- a/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh +++ b/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh @@ -1,10 +1,8 @@ sudo apt-get update && \ sudo apt-get install -y wget apt-transport-https software-properties-common && \ -wget -q https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb && \ -sudo dpkg -i packages-microsoft-prod.deb && \ -sudo apt-get update && \ -sudo apt-get install -y powershell && \ -rm ./packages-microsoft-prod.deb && \ +wget -q https://github.com/PowerShell/PowerShell/releases/download/v7.2.5/powershell-lts_7.2.5-1.deb_amd64.deb && \ +sudo dpkg -i 
powershell-lts_7.2.5-1.deb_amd64.deb && \ +rm ./powershell-lts_7.2.5-1.deb_amd64.deb && \ sudo apt install -y dotnet-sdk-6.0 && \ wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb && \ sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb && \ @@ -16,4 +14,4 @@ wget https://github.com/gruntwork-io/terragrunt/releases/download/v0.35.14/terra sudo mv terragrunt_linux_amd64 terragrunt && \ sudo chmod u+x terragrunt && \ sudo mv terragrunt /usr/local/bin/terragrunt && \ -curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bashaz \ No newline at end of file +curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash \ No newline at end of file From f8db3e3f94c924725f53461e82b972b25942b33c Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 18 Aug 2022 07:12:25 +0800 Subject: [PATCH 135/151] self-hosted-tested --- .../workflows/02.continuous-delivery-uat.yml | 2 +- .github/workflows/login-self-hosted.yml | 73 +++++++++++++++++++ 2 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/login-self-hosted.yml diff --git a/.github/workflows/02.continuous-delivery-uat.yml b/.github/workflows/02.continuous-delivery-uat.yml index abf23b01..54423871 100644 --- a/.github/workflows/02.continuous-delivery-uat.yml +++ b/.github/workflows/02.continuous-delivery-uat.yml @@ -3,7 +3,7 @@ name: Continuous Delivery - Uat on: workflow_dispatch: push: - branches: feature-1.0.4 + branches: main #feature-1.0.4 jobs: deploy-to-env-one: diff --git a/.github/workflows/login-self-hosted.yml b/.github/workflows/login-self-hosted.yml new file mode 100644 index 00000000..2d06ee12 --- /dev/null +++ b/.github/workflows/login-self-hosted.yml @@ -0,0 +1,73 @@ +name: login-self-hosted + +on: + workflow_dispatch: + push: + branches: feature-1.0.4 + +jobs: + deploy-to-env-one: + name: Deploy to Environment One + concurrency: terraform + environment: + name: uat + env: + # This determines the location of the .hcl file that will be used + 
environmentName: uat + gitDeploy : true + skipTerraformDeployment: false + featureTemplate: ${{ secrets.ARM_FEATURE_TEMPLATE }} + WEB_APP_ADMIN_USER: ${{ secrets.WEB_APP_ADMIN_USER }} + keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} + ARM_SYNAPSE_WORKSPACE_NAME: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} + datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} + # Required for Terraform + ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} + ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} + ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} + ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} + # Customizing Terraform vars + TF_VAR_ip_address2 : ${{ secrets.ARM_IP_ADDRESS2 }} + TF_VAR_domain : ${{ secrets.ARM_DOMAIN }} + TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} + TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} + TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} + TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} + TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} + TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} + TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} + TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} + # GIT Integration set up + TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} + TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} + TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} + TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} + TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} + TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} + + #TF_LOG : TRACE 
+ + runs-on: self-hosted + steps: + - name: PrintInfo + run: | + echo "Deploying to Resource Group: ${{ env.TF_VAR_resource_group_name }} " + echo "echo Hcl file name: ${{ env.environmentName}} " + + - name: Checkout + uses: actions/checkout@v3.0.0 + + - name: Get public IP + id: ip + uses: haythem/public-ip@v1.2 + + - name: Login via Az module + uses: azure/login@v1 + with: + creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' + enable-AzPSSession: false From d01968cf0b4e7bbcd242227e81f086262b2453dd Mon Sep 17 00:00:00 2001 From: John Rampono Date: Thu, 18 Aug 2022 21:09:00 +0800 Subject: [PATCH 136/151] Continued Creation of Layer0 --- .../environments/hcltemplates/primary.hcl | 45 ++++ .../vars/common_vars_template.jsonnet | 20 ++ .../vars/staging/common_vars_values.jsonc | 6 +- .../pwshmodules/ProcessTerraformApply.psm1 | 6 + .../DeploymentV2/terraform_layer0/outputs.tf | 70 ++++++ .../DeploymentV2/terraform_layer0/vars.tf | 6 + .../vars/local/terragrunt.hcl | 2 +- .../vars/staging/terragrunt.hcl | 28 +++ .../DeploymentV2/terraform_layer1/main.tf | 1 + .../DeploymentV2/terraform_layer1/vars.tf | 8 + .../DeploymentV2/terraform_layer2/bastion.tf | 56 ----- .../DeploymentV2/terraform_layer2/layer0.tf | 13 ++ .../DeploymentV2/terraform_layer2/layer1.tf | 4 +- .../DeploymentV2/terraform_layer2/locals.tf | 13 ++ .../terraform_layer2/nsg_app_service.tf | 53 ----- .../terraform_layer2/nsg_bastion.tf | 207 ------------------ .../terraform_layer2/nsg_plink.tf | 53 ----- .../DeploymentV2/terraform_layer2/nsg_vms.tf | 51 ----- .../DeploymentV2/terraform_layer2/outputs.tf | 25 ++- .../terraform_layer2/private_dns.tf | 176 --------------- .../DeploymentV2/terraform_layer2/subnet.tf | 63 ------ .../DeploymentV2/terraform_layer2/synapse.tf | 8 +- .../vars/staging/terragrunt.hcl | 20 ++ .../DeploymentV2/terraform_layer2/vnet.tf | 15 
-- 24 files changed, 256 insertions(+), 693 deletions(-) create mode 100644 solution/DeploymentV2/environments/hcltemplates/primary.hcl delete mode 100644 solution/DeploymentV2/terraform_layer2/bastion.tf create mode 100644 solution/DeploymentV2/terraform_layer2/layer0.tf delete mode 100644 solution/DeploymentV2/terraform_layer2/nsg_app_service.tf delete mode 100644 solution/DeploymentV2/terraform_layer2/nsg_bastion.tf delete mode 100644 solution/DeploymentV2/terraform_layer2/nsg_plink.tf delete mode 100644 solution/DeploymentV2/terraform_layer2/nsg_vms.tf delete mode 100644 solution/DeploymentV2/terraform_layer2/private_dns.tf delete mode 100644 solution/DeploymentV2/terraform_layer2/subnet.tf delete mode 100644 solution/DeploymentV2/terraform_layer2/vnet.tf diff --git a/solution/DeploymentV2/environments/hcltemplates/primary.hcl b/solution/DeploymentV2/environments/hcltemplates/primary.hcl new file mode 100644 index 00000000..fdf921cd --- /dev/null +++ b/solution/DeploymentV2/environments/hcltemplates/primary.hcl @@ -0,0 +1,45 @@ +locals { + common_vars = jsondecode(file("../../../bin/environments/{@environment}/common_vars_for_hcl.json")) +} + + + +# These inputs are provided to the terraform variables when deploying the environment +# If you are deploying using pipelines, these can be overridden from environment variables +# using TF_VAR_variablename +inputs = { + prefix = "${local.common_vars.prefix}" # All azure resources will be prefixed with this + domain = "${local.common_vars.domain}" # Used when configuring AAD config for Azure functions + tenant_id = "${local.common_vars.tenant_id}" # This is the Azure AD tenant ID + subscription_id = "${local.common_vars.subscription_id}" # The azure subscription id to deploy to + resource_location = "${local.common_vars.resource_location}" # The location of the resources + resource_group_name = "${local.common_vars.resource_group_name}" # The resource group all resources will be deployed to + owner_tag = 
"${local.common_vars.owner_tag}" # Owner tag value for Azure resources + environment_tag = "${local.common_vars.environment_tag}" # This is used on Azure tags as well as all resource names + ip_address = "${local.common_vars.ip_address}" # This is the ip address of the agent/current IP. Used to create firewall exemptions. + azure_sql_aad_administrators = "${local.common_vars.azure_sql_aad_administrators}" + azure_purview_data_curators = "${local.common_vars.azure_purview_data_curators}" + synapse_administrators = "${local.common_vars.synapse_administrators}" + resource_owners = "${local.common_vars.resource_owners}" + deploy_web_app = true + deploy_function_app = true + deploy_custom_terraform = false # This is whether the infrastructure located in the terraform_custom folder is deployed or not. + deploy_app_service_plan = true + deploy_data_factory = true + deploy_sentinel = true + deploy_purview = false + deploy_synapse = true + deploy_metadata_database = true + is_vnet_isolated = false + publish_web_app = true + publish_function_app = true + publish_sample_files = true + publish_metadata_database = true + configure_networking = false + publish_datafactory_pipelines = true + publish_web_app_addcurrentuserasadmin = true + deploy_selfhostedsql = false + is_onprem_datafactory_ir_registered = false + publish_sif_database = true + deployment_principal_layers1and3 = "${local.common_vars.deployment_principal_layers1and3}" +} diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 2bf01188..43082168 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -361,6 +361,14 @@ local AllVariables = [ "Value": locals[environment].GIT_ADF_EMAIL_ADDRESS, "Sensitive": false, "DoNotReplaceDuringAgentDeployment":false + }, + { + "CICDSecretName": "", + "EnvVarName": 
"TF_VAR_is_private_network", + "HCLName": "is_private_network", + "Value": locals[environment].is_private_network, + "Sensitive": false, + "DoNotReplaceDuringAgentDeployment":false } ]+featuretemplate; @@ -659,6 +667,18 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index a7e03846..7139759c 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -7,7 +7,7 @@ "subscription_id": "035a1364-f00d-48e2-b582-4fe125905ee3", "prefix": "ads", "resource_group_name": "gft6", - "state_storage_account_name": "gft6state", + "state_storage_account_name": "gf6state", "ip_address": "144.138.148.220", "ip_address2": "144.138.148.220", "tenant_id": "72f988bf-86f1-41af-91ab-2d7cd011db47", @@ -38,6 +38,7 @@ "GIT_ADF_REPOSITORY_BRANCH_NAME": "#####", "GIT_ADF_USER_NAME": "#####", "GIT_ADF_EMAIL_ADDRESS": "#####", + "is_private_network": true, "FeatureTemplateOverrides": { "is_onprem_datafactory_ir_registered": false, "deployment_layer3_complete": false, @@ -48,6 +49,7 @@ "synapse_git_repository_owner": "h-sha", "synapse_git_repository_name": "testLockbox", "synapse_git_repository_root_folder": "/Synapse", - "synapse_git_use_pat": false + "synapse_git_use_pat": false, + "layer0_state": "remote" } } diff --git a/solution/DeploymentV2/pwshmodules/ProcessTerraformApply.psm1 b/solution/DeploymentV2/pwshmodules/ProcessTerraformApply.psm1 index 0d6d9746..21d80ce8 100644 --- a/solution/DeploymentV2/pwshmodules/ProcessTerraformApply.psm1 +++ b/solution/DeploymentV2/pwshmodules/ProcessTerraformApply.psm1 @@ -21,3 +21,9 @@ function ProcessTerraformApply ( } } + + +# +# +# $pout = terragrunt plan --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json +# (($pout | ConvertFrom-Json 
-Depth 20) | Where-Object {$_.type -eq "change_summary"}) \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer0/outputs.tf b/solution/DeploymentV2/terraform_layer0/outputs.tf index 241d1621..0b6bb8f5 100644 --- a/solution/DeploymentV2/terraform_layer0/outputs.tf +++ b/solution/DeploymentV2/terraform_layer0/outputs.tf @@ -1,3 +1,73 @@ + +/*DNS Zone*/ +output "private_dns_zone_servicebus_id" { + value = local.private_dns_zone_servicebus_id +} + +output "private_dns_zone_queue_id" { + value = local.private_dns_zone_queue_id +} + +output "private_dns_zone_blob_id" { + value = local.private_dns_zone_blob_id +} + output "private_dns_zone_dfs_id" { value = local.private_dns_zone_dfs_id +} + + +output "private_dns_zone_purview_id" { + value = local.private_dns_zone_purview_id +} + +output "private_dns_zone_purview_studio_id" { + value = local.private_dns_zone_purview_studio_id +} + + +output "private_dns_zone_synapse_sql_id" { + value = local.private_dns_zone_synapse_sql_id +} + +output "private_dns_zone_synapse_gateway_id" { + value = local.private_dns_zone_synapse_gateway_id +} + + +output "private_dns_zone_synapse_studio_id" { + value = local.private_dns_zone_synapse_studio_id +} + +output "plink_subnet_id" { + value = local.plink_subnet_id +} + +output "azurerm_virtual_network_vnet_name" { + value = var.is_vnet_isolated ? 
azurerm_virtual_network.vnet[0].name : "" +} + +output "private_dns_zone_db_id" { + value = local.private_dns_zone_db_id +} + +output "app_service_subnet_id" { + value = local.app_service_subnet_id +} + +output "vm_subnet_id" { + value = local.vm_subnet_id +} + +output "private_dns_zone_kv_id" { + value = local.private_dns_zone_kv_id +} + +/*Variables for Naming Module*/ +output "naming_unique_seed" { + value = module.naming.unique-seed +} + +output "naming_unique_suffix" { + value = substr(module.naming.unique-seed, 0, 4) } \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer0/vars.tf b/solution/DeploymentV2/terraform_layer0/vars.tf index e5b751df..b601294a 100644 --- a/solution/DeploymentV2/terraform_layer0/vars.tf +++ b/solution/DeploymentV2/terraform_layer0/vars.tf @@ -79,6 +79,12 @@ variable "is_vnet_isolated" { type = bool } +variable "is_private_network" { + description = "If true will disable public IP's entirely" + default = false + type = bool +} + variable "sql_admin_username" { description = "The username for the sql server admin" default = "adsgofastsqladminuser11" diff --git a/solution/DeploymentV2/terraform_layer0/vars/local/terragrunt.hcl b/solution/DeploymentV2/terraform_layer0/vars/local/terragrunt.hcl index 0f9e5d47..fdf921cd 100644 --- a/solution/DeploymentV2/terraform_layer0/vars/local/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer0/vars/local/terragrunt.hcl @@ -1,5 +1,5 @@ locals { - common_vars = jsondecode(file("../../../bin/environments/local/common_vars_for_hcl.json")) + common_vars = jsondecode(file("../../../bin/environments/{@environment}/common_vars_for_hcl.json")) } diff --git a/solution/DeploymentV2/terraform_layer0/vars/staging/terragrunt.hcl b/solution/DeploymentV2/terraform_layer0/vars/staging/terragrunt.hcl index 57f8fe58..374f6c2e 100644 --- a/solution/DeploymentV2/terraform_layer0/vars/staging/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer0/vars/staging/terragrunt.hcl @@ -2,6 
+2,34 @@ locals { common_vars = jsondecode(file("../../../bin/environments/staging/common_vars_for_hcl.json")) } +/*If performing a private networking deployment Prepare.ps1 will initially set TF_VAR_layer0_state to 'local' after initial deployment this should be set to 'remote'*/ +generate "backend.tf" { + + path = "backend.tf" + if_exists = "overwrite_terragrunt" + contents = < Date: Sat, 20 Aug 2022 06:35:44 +0800 Subject: [PATCH 137/151] testing deployment with new layer 0 --- solution/DeploymentV2/Prepare.ps1 | 36 +++++++----- .../tghcltemplates/GenerateTgHCLS.ps1 | 25 ++++++++ .../primary.hcl => tghcltemplates/inputs.hcl} | 6 -- .../environments/tghcltemplates/layer0.hcl | 30 ++++++++++ .../environments/tghcltemplates/layer1.hcl | 19 ++++++ .../environments/tghcltemplates/layer2.hcl | 58 +++++++++++++++++++ .../environments/tghcltemplates/layer3.hcl | 38 ++++++++++++ .../vars/staging/common_vars_values.jsonc | 9 ++- .../DeploymentV2/terraform_layer0/bastion.tf | 4 +- .../terraform_layer0/storage_adls_state.tf | 8 ++- .../DeploymentV2/terraform_layer0/vars.tf | 21 +++++++ .../terraform_layer0/virtual_machines.tf | 3 +- .../DeploymentV2/terraform_layer0/vnet.tf | 2 +- .../DeploymentV2/terraform_layer2/purview.tf | 1 + 14 files changed, 229 insertions(+), 31 deletions(-) create mode 100644 solution/DeploymentV2/environments/tghcltemplates/GenerateTgHCLS.ps1 rename solution/DeploymentV2/environments/{hcltemplates/primary.hcl => tghcltemplates/inputs.hcl} (96%) create mode 100644 solution/DeploymentV2/environments/tghcltemplates/layer0.hcl create mode 100644 solution/DeploymentV2/environments/tghcltemplates/layer1.hcl create mode 100644 solution/DeploymentV2/environments/tghcltemplates/layer2.hcl create mode 100644 solution/DeploymentV2/environments/tghcltemplates/layer3.hcl diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index fe10bdb9..e8448176 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ 
b/solution/DeploymentV2/Prepare.ps1 @@ -170,12 +170,14 @@ else $uinput = Get-SelectionFromUser -Options ('Public','Isolated', 'Private') -Prompt "Please select Network Isolation Level" if($uinput -eq "Public") { + $delay_private_access = true Write-Host "Creating Public Storage" $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --allow-blob-public-access false --public-network-access Enabled --default-action Allow --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors } if($uinput -eq "Isolated") { + $delay_private_access = true Write-Host "Creating Isolated Storage" #Isolated $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --allow-blob-public-access false --public-network-access Enabled --default-action Deny --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors @@ -185,32 +187,33 @@ else } if($uinput -eq "Private") { - Write-Host "Creating Private Storage" + $delay_private_access = false #Private - $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --pr --allow-blob-public-access false --public-network-access Disabled --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors - - $DeploymentVnet = Read-Host "Please input the name of the spoke vnet for the deployment. 
If you leave it blank it will default to 'ads-stg-vnet-ads'" - if([string]::IsNullOrEmpty($DeploymentVnet)) - { - $DeploymentVnet = "ads-stg-vnet-ads" - } + #Now run Layer 0 terraform + + #$storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --pr --allow-blob-public-access false --public-network-access Disabled --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors + #$DeploymentVnet = Read-Host "Please input the name of the spoke vnet for the deployment. If you leave it blank it will default to 'ads-stg-vnet-ads'" + #if([string]::IsNullOrEmpty($DeploymentVnet)) + #{ + # $DeploymentVnet = "ads-stg-vnet-ads" + #} #Create the VNET - $output = az network vnet create --name $DeploymentVnet --resource-group $env:TF_VAR_resource_group_name --address-prefixes "10.0.0.0/24" --subnet-name ads-stg-snet-ads-vm --subnet-prefixes 10.0.0.192/26 + #$output = az network vnet create --name $DeploymentVnet --resource-group $env:TF_VAR_resource_group_name --address-prefixes "10.0.0.0/24" --subnet-name ads-stg-snet-ads-vm --subnet-prefixes 10.0.0.192/26 #Create Private Endpoint for DFS - $output = az network private-endpoint create -g $env:TF_VAR_resource_group_name -n $env:TF_VAR_state_storage_account_name --vnet-name $DeploymentVnet --subnet ads-stg-snet-ads-vm --private-connection-resource-id "/subscriptions/$env:TF_VAR_subscription_id/resourceGroups/$env:TF_VAR_resource_group_name/providers/Microsoft.Storage/storageAccounts/$env:TF_VAR_state_storage_account_name" --connection-name "$env:TF_VAR_state_storage_account_name-dfs-plink" -l australiaeast --group-id dfs --zone-name "privatelink.dfs.core.windows.net" + #$output = az network private-endpoint create -g $env:TF_VAR_resource_group_name -n $env:TF_VAR_state_storage_account_name --vnet-name $DeploymentVnet --subnet ads-stg-snet-ads-vm --private-connection-resource-id 
"/subscriptions/$env:TF_VAR_subscription_id/resourceGroups/$env:TF_VAR_resource_group_name/providers/Microsoft.Storage/storageAccounts/$env:TF_VAR_state_storage_account_name" --connection-name "$env:TF_VAR_state_storage_account_name-dfs-plink" -l australiaeast --group-id dfs --zone-name "privatelink.dfs.core.windows.net" #DFS Zone and Vnet Link - $output = az network private-dns zone create --resource-group $env:TF_VAR_resource_group_name --name "privatelink.dfs.core.windows.net" - $output = az network private-endpoint dns-zone-group create --endpoint-name "$env:TF_VAR_state_storage_account_name" -g $env:TF_VAR_resource_group_name -n "privatednszonegroupstoragedfs" --zone-name "privatelink.dfs.core.windows.net" --private-dns-zone "privatelink.dfs.core.windows.net" - $output = az network private-dns link vnet create --name "privatelink.dfs.core.windows.net" --registration-enabled false --resource-group $env:TF_VAR_resource_group_name --virtual-network "/subscriptions/$env:TF_VAR_subscription_id/resourceGroups/$env:TF_VAR_resource_group_name/providers/Microsoft.Network/virtualNetworks/$DeploymentVnet" --zone-name "privatelink.dfs.core.windows.net" + #$output = az network private-dns zone create --resource-group $env:TF_VAR_resource_group_name --name "privatelink.dfs.core.windows.net" + #$output = az network private-endpoint dns-zone-group create --endpoint-name "$env:TF_VAR_state_storage_account_name" -g $env:TF_VAR_resource_group_name -n "privatednszonegroupstoragedfs" --zone-name "privatelink.dfs.core.windows.net" --private-dns-zone "privatelink.dfs.core.windows.net" + #$output = az network private-dns link vnet create --name "privatelink.dfs.core.windows.net" --registration-enabled false --resource-group $env:TF_VAR_resource_group_name --virtual-network "/subscriptions/$env:TF_VAR_subscription_id/resourceGroups/$env:TF_VAR_resource_group_name/providers/Microsoft.Network/virtualNetworks/$DeploymentVnet" --zone-name "privatelink.dfs.core.windows.net" #Add Resources 
to Zones - $storageip = ((az network private-endpoint show --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name) | ConvertFrom-Json -depth 10).customDnsConfigs.ipAddresses - az network private-dns record-set a create -g $env:TF_VAR_resource_group_name -z "privatelink.dfs.core.windows.net" -n "$env:TF_VAR_state_storage_account_name" --ttl 10 - $output = az network private-dns record-set a add-record -g $env:TF_VAR_resource_group_name -z "privatelink.dfs.core.windows.net" -n "$env:TF_VAR_state_storage_account_name" -a $storageip + #$storageip = ((az network private-endpoint show --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name) | ConvertFrom-Json -depth 10).customDnsConfigs.ipAddresses + #az network private-dns record-set a create -g $env:TF_VAR_resource_group_name -z "privatelink.dfs.core.windows.net" -n "$env:TF_VAR_state_storage_account_name" --ttl 10 + #$output = az network private-dns record-set a add-record -g $env:TF_VAR_resource_group_name -z "privatelink.dfs.core.windows.net" -n "$env:TF_VAR_state_storage_account_name" -a $storageip } Write-Host "Creating Role Assignment" $userObjectId = az ad signed-in-user show --query id -o tsv --only-show-errors @@ -336,6 +339,7 @@ else $common_vars_values.FeatureTemplateOverrides.deployment_layer3_complete = $false } + $common_vars_values.FeatureTemplateOverrides.delay_private_access = $delay_private_access $common_vars_values | Convertto-Json -Depth 10 | Set-Content ./environments/vars/$environmentName/common_vars_values.jsonc diff --git a/solution/DeploymentV2/environments/tghcltemplates/GenerateTgHCLS.ps1 b/solution/DeploymentV2/environments/tghcltemplates/GenerateTgHCLS.ps1 new file mode 100644 index 00000000..43c2c7e9 --- /dev/null +++ b/solution/DeploymentV2/environments/tghcltemplates/GenerateTgHCLS.ps1 @@ -0,0 +1,25 @@ +function GenerateTgHCLS ( + [Parameter(Mandatory = $true)] + [System.Boolean]$gitDeploy = $false, + 
[Parameter(Mandatory = $true)] + [String]$deploymentFolderPath, + [Parameter(Mandatory = $false)] + [String]$PathToReturnTo = "" +) +{ + $layers = @(0,1,2,3) + $envlist = (Get-ChildItem -Directory -Path ./../vars | Select-Object -Property Name).Name + + foreach ($l in $layers) + { + foreach ($e in $envlist) + { + $layer = "layer$($l.ToString())" + $layer_t = (Get-Content "$layer.hcl" -Raw) + $inputs_t = (Get-Content "inputs.hcl" -Raw) + $template = $layer_t + $inputs_t + $template = $template.Replace("<@environment>", $e) + $template + } + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/hcltemplates/primary.hcl b/solution/DeploymentV2/environments/tghcltemplates/inputs.hcl similarity index 96% rename from solution/DeploymentV2/environments/hcltemplates/primary.hcl rename to solution/DeploymentV2/environments/tghcltemplates/inputs.hcl index fdf921cd..b827852f 100644 --- a/solution/DeploymentV2/environments/hcltemplates/primary.hcl +++ b/solution/DeploymentV2/environments/tghcltemplates/inputs.hcl @@ -1,9 +1,3 @@ -locals { - common_vars = jsondecode(file("../../../bin/environments/{@environment}/common_vars_for_hcl.json")) -} - - - # These inputs are provided to the terraform variables when deploying the environment # If you are deploying using pipelines, these can be overridden from environment variables # using TF_VAR_variablename diff --git a/solution/DeploymentV2/environments/tghcltemplates/layer0.hcl b/solution/DeploymentV2/environments/tghcltemplates/layer0.hcl new file mode 100644 index 00000000..ee439e3f --- /dev/null +++ b/solution/DeploymentV2/environments/tghcltemplates/layer0.hcl @@ -0,0 +1,30 @@ +locals { + common_vars = jsondecode(file("../../../bin/environments/<@environment>/common_vars_for_hcl.json")) +} + +/*If performing a private networking deployment Prepare.ps1 will initially set TF_VAR_layer0_state to 'local' after initial deployment this should be set to 'remote'*/ +generate "backend.tf" { + + path = 
"backend.tf" + if_exists = "overwrite_terragrunt" + contents = </common_vars_for_hcl.json")) +} + +remote_state { + backend = "azurerm" + generate = { + path = "backend.tf" + if_exists = "overwrite_terragrunt" + } + config = { + # You need to update the resource group and storage account here. + # You should have created these with the Prepare.ps1 script. + resource_group_name = "${local.common_vars.resource_group_name}" + storage_account_name = "${local.common_vars.state_storage_account_name}" + container_name = "tstate" + key = "terraform_layer1.tfstate" + } +} \ No newline at end of file diff --git a/solution/DeploymentV2/environments/tghcltemplates/layer2.hcl b/solution/DeploymentV2/environments/tghcltemplates/layer2.hcl new file mode 100644 index 00000000..6c73d8e9 --- /dev/null +++ b/solution/DeploymentV2/environments/tghcltemplates/layer2.hcl @@ -0,0 +1,58 @@ +locals { + common_vars = jsondecode(file("../../../bin/environments/<@environment>/common_vars_for_hcl.json")) +} + +generate "layer0.tf" { + path = "layer0.tf" + if_exists = "overwrite_terragrunt" + contents = </common_vars_for_hcl.json")) +} + +generate "layer2.tf" { + path = "layer2.tf" + if_exists = "overwrite_terragrunt" + contents = < ro } - scope = azapi_resource.adls_state.id + scope = azapi_resource.adls_state[0].id role_definition_name = "Storage Blob Data Contributor" principal_id = each.value + depends_on = [ + azapi_resource.adls_state + ] } resource "azurerm_private_endpoint" "adls_state_storage_private_endpoint_with_dns" { diff --git a/solution/DeploymentV2/terraform_layer0/vars.tf b/solution/DeploymentV2/terraform_layer0/vars.tf index b601294a..7bebefb6 100644 --- a/solution/DeploymentV2/terraform_layer0/vars.tf +++ b/solution/DeploymentV2/terraform_layer0/vars.tf @@ -796,6 +796,13 @@ variable "existing_synapse_private_link_hub_id" { type = string } +variable "existing_vnet_id" { + description = "An existing virtual network." 
+ default = "" + type = string +} + + variable "custom_vm_plan_name" { description = "An Azure vm plan name to be referenced for a custom vm image." default = "" @@ -895,3 +902,17 @@ variable "resource_owners" { type = list(string) } + +/*Toggles for Layer0 */ +variable "deploy_cicd_vm" { + description = "Set to true if you want to deploy a vm to host the github runnner." + default = true + type = bool +} + +variable "deploy_state_storage_account" { + description = "Set to true if you want to deploy a the state storage account." + default = true + type = bool +} + diff --git a/solution/DeploymentV2/terraform_layer0/virtual_machines.tf b/solution/DeploymentV2/terraform_layer0/virtual_machines.tf index 5b2306f3..ca3463a4 100644 --- a/solution/DeploymentV2/terraform_layer0/virtual_machines.tf +++ b/solution/DeploymentV2/terraform_layer0/virtual_machines.tf @@ -1,5 +1,5 @@ resource "azurerm_network_interface" "cicd_vm_nic" { - count = var.is_vnet_isolated ? 1 : 0 + count = var.is_vnet_isolated && var.deploy_cicd_vm ? 1 : 0 name = local.jumphost_nic_name location = var.resource_location resource_group_name = var.resource_group_name @@ -13,6 +13,7 @@ resource "azurerm_network_interface" "cicd_vm_nic" { resource "azurerm_virtual_machine" "cicd_vm_linux" { + count = var.deploy_cicd_vm ? 1 : 0 name = "CICDAgent" location = var.resource_location resource_group_name = var.resource_group_name diff --git a/solution/DeploymentV2/terraform_layer0/vnet.tf b/solution/DeploymentV2/terraform_layer0/vnet.tf index 080a25f1..c74b7d8c 100644 --- a/solution/DeploymentV2/terraform_layer0/vnet.tf +++ b/solution/DeploymentV2/terraform_layer0/vnet.tf @@ -1,6 +1,6 @@ resource "azurerm_virtual_network" "vnet" { - count = (var.is_vnet_isolated || var.deploy_selfhostedsql || var.deploy_h2o-ai ? 1 : 0) + count = ((var.is_vnet_isolated || var.deploy_selfhostedsql || var.deploy_h2o-ai) && var.existing_vnet_id == "" ? 
1 : 0) name = local.vnet_name location = var.resource_location resource_group_name = var.resource_group_name diff --git a/solution/DeploymentV2/terraform_layer2/purview.tf b/solution/DeploymentV2/terraform_layer2/purview.tf index eec5913c..964dbe10 100644 --- a/solution/DeploymentV2/terraform_layer2/purview.tf +++ b/solution/DeploymentV2/terraform_layer2/purview.tf @@ -19,3 +19,4 @@ resource "azurerm_purview_account" "purview" { } + From 9f42bcbe6a59aa99989fe6d2f9c3d218cf813edb Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 20 Aug 2022 06:38:38 +0800 Subject: [PATCH 138/151] testing layer 0 --- .github/workflows/02.continuous-delivery-staging.yml | 2 +- .../environments/vars/staging/common_vars_values.jsonc | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index 84cbd2be..9abbd83b 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -3,7 +3,7 @@ name: Continuous Delivery - Staging on: workflow_dispatch: push: - branches: main #feature-1.0.4 + branches: feature-1.0.4 jobs: deploy-to-env-one: diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 46a1fe60..77c2945f 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -55,6 +55,7 @@ "synapse_git_repository_name": "testLockbox", "synapse_git_repository_root_folder": "/Synapse", "synapse_git_use_pat": false, - "layer0_state": "remote" + "layer0_state": "remote", + "delay_private_access": false } } From 75d2f1254049eb8c5e323c414333133e3e2ba80f Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 20 Aug 2022 06:44:10 +0800 Subject: [PATCH 139/151] deleted test github workflow --- 
.../02.continuous-delivery-staging.yml | 4 +- .github/workflows/login-self-hosted.yml | 73 ------------------- 2 files changed, 2 insertions(+), 75 deletions(-) delete mode 100644 .github/workflows/login-self-hosted.yml diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index 9abbd83b..e36e5956 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -52,7 +52,7 @@ jobs: #TF_LOG : TRACE - runs-on: ubuntu-latest + runs-on: self-hosted steps: - name: PrintInfo run: | @@ -169,7 +169,7 @@ jobs: #PROD ENVIRONMENT #TF_LOG : TRACE - runs-on: ubuntu-latest + runs-on: self-hosted steps: - name: Checkout diff --git a/.github/workflows/login-self-hosted.yml b/.github/workflows/login-self-hosted.yml deleted file mode 100644 index 2d06ee12..00000000 --- a/.github/workflows/login-self-hosted.yml +++ /dev/null @@ -1,73 +0,0 @@ -name: login-self-hosted - -on: - workflow_dispatch: - push: - branches: feature-1.0.4 - -jobs: - deploy-to-env-one: - name: Deploy to Environment One - concurrency: terraform - environment: - name: uat - env: - # This determines the location of the .hcl file that will be used - environmentName: uat - gitDeploy : true - skipTerraformDeployment: false - featureTemplate: ${{ secrets.ARM_FEATURE_TEMPLATE }} - WEB_APP_ADMIN_USER: ${{ secrets.WEB_APP_ADMIN_USER }} - keyVaultName: ${{ secrets.ARM_KEYVAULT_NAME }} - ARM_SYNAPSE_WORKSPACE_NAME: ${{ secrets.ARM_SYNAPSE_WORKSPACE_NAME }} - datalakeName: ${{ secrets.ARM_DATALAKE_NAME }} - # Required for Terraform - ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} - ARM_PAL_PARTNER_ID: ${{ secrets.ARM_PAL_PARTNER_ID }} - ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} - ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} - ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} - # Customizing Terraform vars - TF_VAR_ip_address2 : ${{ secrets.ARM_IP_ADDRESS2 }} - TF_VAR_domain : ${{ 
secrets.ARM_DOMAIN }} - TF_VAR_subscription_id : ${{ secrets.ARM_SUBSCRIPTION_ID }} - TF_VAR_tenant_id : ${{ secrets.ARM_TENANT_ID }} - TF_VAR_environment_tag : ${{ secrets.ENVIRONMENT_TAG }} - TF_VAR_resource_group_name : ${{ secrets.ARM_RESOURCE_GROUP_NAME }} - TF_VAR_state_storage_account_name : ${{ secrets.ARM_STORAGE_NAME }} - TF_VAR_jumphost_password : ${{secrets.ARM_JUMPHOST_PASSWORD}} - TF_VAR_synapse_sql_password : ${{secrets.ARM_SYNAPSE_PASSWORD}} - TF_VAR_web_app_admin_security_group : ${{secrets.WEB_APP_ADMIN_SECURITY_GROUP}} - # GIT Integration set up - TF_VAR_synapse_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} - TF_VAR_synapse_git_repository_branch_name : ${{secrets.GIT_SYNAPSE_REPOSITORY_BRANCH_NAME}} - TF_VAR_synapse_git_pat : ${{secrets.GIT_PAT}} - TF_VAR_synapse_git_user_name : ${{secrets.GIT_USER_NAME}} - TF_VAR_synapse_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - TF_VAR_adf_git_repository_name : ${{secrets.GIT_REPOSITORY_NAME}} - TF_VAR_adf_git_repository_branch_name : ${{secrets.GIT_ADF_REPOSITORY_BRANCH_NAME}} - TF_VAR_adf_git_pat : ${{secrets.GIT_PAT}} - TF_VAR_adf_git_user_name : ${{secrets.GIT_USER_NAME}} - TF_VAR_adf_git_email_address : ${{secrets.GIT_EMAIL_ADDRESS}} - - #TF_LOG : TRACE - - runs-on: self-hosted - steps: - - name: PrintInfo - run: | - echo "Deploying to Resource Group: ${{ env.TF_VAR_resource_group_name }} " - echo "echo Hcl file name: ${{ env.environmentName}} " - - - name: Checkout - uses: actions/checkout@v3.0.0 - - - name: Get public IP - id: ip - uses: haythem/public-ip@v1.2 - - - name: Login via Az module - uses: azure/login@v1 - with: - creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' - enable-AzPSSession: false From baf734405f3ad2890bd94758fb21905f5c20729b Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 20 Aug 2022 06:45:38 +0800 Subject: 
[PATCH 140/151] Diabled PS Login --- .github/workflows/02.continuous-delivery-staging.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index e36e5956..c3fdda73 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -70,7 +70,7 @@ jobs: uses: azure/login@v1 with: creds: '{"clientId":"${{ secrets.ARM_CLIENT_ID }}","clientSecret":"${{ secrets.ARM_CLIENT_SECRET }}","subscriptionId":"${{ secrets.ARM_SUBSCRIPTION_ID }}","tenantId":"${{ secrets.ARM_TENANT_ID }}"}' - enable-AzPSSession: true + enable-AzPSSession: false - name: Open Firewalls for Agent id: open_firewalls From d022dd5ba285972af1c4620c94c713ca5cef2d32 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 20 Aug 2022 12:10:20 +0800 Subject: [PATCH 141/151] Paraming terragrunt install --- .github/workflows/02.continuous-delivery-staging.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index c3fdda73..b0aec658 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -12,6 +12,8 @@ jobs: environment: name: development env: + # Runner Installs + installTerragrunt: false # This determines the location of the .hcl file that will be used environmentName: staging gitDeploy : true @@ -91,6 +93,7 @@ jobs: - name: Terragrunt Install id: terragrunt_install working-directory: ./solution/DeploymentV2/terraform_layer2 + if: env.installTerragrunt run: | brew install terragrunt From 0b75894c8cd7d67b19b122264ce23d2a6976ea13 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 20 Aug 2022 12:16:03 +0800 Subject: [PATCH 142/151] test --- .github/workflows/02.continuous-delivery-staging.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index b0aec658..4b02a438 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -93,7 +93,7 @@ jobs: - name: Terragrunt Install id: terragrunt_install working-directory: ./solution/DeploymentV2/terraform_layer2 - if: env.installTerragrunt + if: ${{ env.installTerragrunt }} run: | brew install terragrunt From b0cebf121ea1afb0bce300bda937f1b8295e3cba Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sat, 20 Aug 2022 12:48:32 +0800 Subject: [PATCH 143/151] test terragrint condition --- .github/workflows/02.continuous-delivery-staging.yml | 7 ++++--- solution/DeploymentV2/utilities/InstallTerragrunt.sh | 5 +++++ 2 files changed, 9 insertions(+), 3 deletions(-) create mode 100755 solution/DeploymentV2/utilities/InstallTerragrunt.sh diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index 4b02a438..ed3daf43 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -92,10 +92,11 @@ jobs: - name: Terragrunt Install id: terragrunt_install - working-directory: ./solution/DeploymentV2/terraform_layer2 - if: ${{ env.installTerragrunt }} + working-directory: ./solution/DeploymentV2/terraform_layer2 run: | - brew install terragrunt + tgcheck=$(command -v terragrunt) + if [ -z "$tgcheck" ]; then tgcheck="NotInstalled";fi + if [ $tgcheck == "NotInstalled" ];then echo "Terragrunt Not Installed. 
Installing"; brew install terragrunt; else echo "Terragrunt Installed"; fi - name: Install Jsonnet id: jsonnet-install diff --git a/solution/DeploymentV2/utilities/InstallTerragrunt.sh b/solution/DeploymentV2/utilities/InstallTerragrunt.sh new file mode 100755 index 00000000..3a2cbf5d --- /dev/null +++ b/solution/DeploymentV2/utilities/InstallTerragrunt.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +tgcheck=$(command -v terragrunt) +if [ -z "$tgcheck" ]; then tgcheck="NotInstalled";fi +if [ $tgcheck == "NotInstalled" ];then echo "Terragrunt Not Installed. Installing"; brew install terragrunt; else echo "Terragrunt Installed"; fi From 439b958c251631ace3de05bcb48d593a9fbb98c2 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Sun, 21 Aug 2022 20:43:00 +0800 Subject: [PATCH 144/151] Fixed CICD agent use of wrong nic --- .github/workflows/02.continuous-delivery-staging.yml | 2 +- solution/DeploymentV2/README.md | 7 ++++--- .../environments/vars/common_vars_template.jsonnet | 6 ++++++ .../environments/vars/staging/common_vars_values.jsonc | 2 +- solution/DeploymentV2/terraform_layer0/locals.tf | 3 +++ solution/DeploymentV2/terraform_layer0/virtual_machines.tf | 2 +- solution/DeploymentV2/terraform_layer2/02-deploy.ps1 | 2 +- solution/DeploymentV2/terraform_layer2/synapse.tf | 4 ++-- solution/DeploymentV2/terraform_layer3/layer2.tf | 4 ++-- solution/DeploymentV2/terraform_layer3/purview.tf | 2 +- solution/DeploymentV2/utilities/ConnectToCICDAgent.sh | 1 + solution/DeploymentV2/utilities/GitHubRunnerInstall.sh | 4 ++++ 12 files changed, 27 insertions(+), 12 deletions(-) create mode 100644 solution/DeploymentV2/utilities/ConnectToCICDAgent.sh diff --git a/.github/workflows/02.continuous-delivery-staging.yml b/.github/workflows/02.continuous-delivery-staging.yml index ed3daf43..3d51b987 100644 --- a/.github/workflows/02.continuous-delivery-staging.yml +++ b/.github/workflows/02.continuous-delivery-staging.yml @@ -3,7 +3,7 @@ name: Continuous Delivery - Staging on: workflow_dispatch: 
push: - branches: feature-1.0.4 + branches: main #feature-1.0.4 jobs: deploy-to-env-one: diff --git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index e912b806..8bec63fd 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -76,6 +76,7 @@ The configuration for this environment creation is read from the following locat ### Deployment Layers Layer | Description | Permissions Required when using Service Principal | Permissions Required when using User Principal | --- | --- | --- | --- | -Terraform Layer One | - Register AAD Enterprise Applications & Service Principals | - Application.ReadWrite.OwnedBy | - Application Administrator (Role) -Terraform Layer Two | - Core IAC deployment for approx. 70 ADS Go fast resources | - Resource Group Owner | - Resource Group Owner -Terraform Layer Three | - Update AAD Enterprise Applications by granting required roles and permissions to managed service identities created in Layer Two
- Create Private Endpoints for Purview | - Application.ReadWrite.OwnedBy
(Must be same identity as that which was used to run Layer One) | - Application Administrator (Role),
- Network Contributor +Terraform Layer Zero | - Deploys the spoke VNET with subnets, dns zones, bastion & a VM for the CICD agent | - Resouce Group Owner
- Blob Contributor on Terraform's State Storage Account | - Resouce Group Owner
- Blob Contributor on Terraform's State Storage Account +Terraform Layer One | - Register AAD Enterprise Applications & Service Principals | - Application.ReadWrite.OwnedBy
- Blob Contributor on Terraform's State Storage Account| - Application Administrator (Role)
- Blob Contributor on Terraform's State Storage Account +Terraform Layer Two | - Core IAC deployment for approx. 70 ADS Go fast resources | - Resource Group Owner
- Blob Contributor on Terraform's State Storage Account| - Resource Group Owner
- Blob Contributor on Terraform's State Storage Account +Terraform Layer Three | - Update AAD Enterprise Applications by granting required roles and permissions to managed service identities created in Layer Two
- Create Private Endpoints for Purview | - Application.ReadWrite.OwnedBy
(Must be same identity as that which was used to run Layer One)
- Blob Contributor on Terraform's State Storage Account | - Application Administrator (Role),
- Network Contributor
- Blob Contributor on Terraform's State Storage Account diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index 43082168..e7b4b751 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -685,6 +685,12 @@ local SecretFileSensitiveVars = { // Object comprehension. + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 77c2945f..0034ace3 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -46,7 +46,7 @@ "is_private_network": true, "FeatureTemplateOverrides": { "is_onprem_datafactory_ir_registered": false, - "deployment_layer3_complete": false, + "deployment_layer3_complete": true, "synapse_git_toggle": false, "adf_git_toggle": false, "deploy_sentinel": false, diff --git a/solution/DeploymentV2/terraform_layer0/locals.tf b/solution/DeploymentV2/terraform_layer0/locals.tf index 15a5146a..138c1a20 100644 --- a/solution/DeploymentV2/terraform_layer0/locals.tf +++ b/solution/DeploymentV2/terraform_layer0/locals.tf @@ -35,6 +35,9 @@ locals { jumphost_vm_name = module.naming.virtual_machine.name jumphost_nic_name = "${module.naming.virtual_machine.name}-jumphost_nic" jumphost_password = ((var.is_vnet_isolated && var.jumphost_password == null) ? "" : var.jumphost_password) + cicdagent_vm_name = replace(module.naming.virtual_machine.name,"-vm-ads","-vm-cicd") + cicdagent_nic_name = "${replace(module.naming.virtual_machine.name,"-vm-ads","-vm-cicd")}-jumphost_nic" + cicdagent_password = ((var.is_vnet_isolated && var.jumphost_password == null) ? "" : var.jumphost_password) synapse_data_lake_name = (var.synapse_data_lake_name != "" ? 
var.synapse_data_lake_name : module.naming.data_lake_store.name_unique) synapse_workspace_name = (var.synapse_workspace_name != "" ? var.synapse_workspace_name : "${var.prefix}${var.environment_tag}synw${var.app_name}${element(split("-", module.naming.data_factory.name_unique), length(split("-", module.naming.data_factory.name_unique)) - 1)}") synapse_dwpool_name = (var.synapse_dwpool_name != "" ? var.synapse_dwpool_name : "${var.prefix}${var.environment_tag}syndp${var.app_name}") diff --git a/solution/DeploymentV2/terraform_layer0/virtual_machines.tf b/solution/DeploymentV2/terraform_layer0/virtual_machines.tf index ca3463a4..959d6129 100644 --- a/solution/DeploymentV2/terraform_layer0/virtual_machines.tf +++ b/solution/DeploymentV2/terraform_layer0/virtual_machines.tf @@ -1,6 +1,6 @@ resource "azurerm_network_interface" "cicd_vm_nic" { count = var.is_vnet_isolated && var.deploy_cicd_vm ? 1 : 0 - name = local.jumphost_nic_name + name = local.cicdagent_nic_name location = var.resource_location resource_group_name = var.resource_group_name diff --git a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 index 7aaaead9..db843715 100644 --- a/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer2/02-deploy.ps1 @@ -44,7 +44,7 @@ PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderP # Main Terraform - Layer1 #------------------------------------------------------------------------------------------------------------ Write-Host "Starting Terraform Deployment- Layer 2" -Write-Host "Expect this to take 20-30 minutes to complete the first time it is run. Subsequent, incremental builds should only take a few minutes." +Write-Host "Expect this to take twenty to thirty minutes to complete the first time it is run. Subsequent, incremental builds should only take a few minutes." 
if([string]::IsNullOrEmpty($env:TF_VAR_jumphost_password) -and ($gitDeploy -eq $false -or $null -eq $gitdeploy)) { $env:TF_VAR_jumphost_password = Read-Host "Enter the Jumphost Password" diff --git a/solution/DeploymentV2/terraform_layer2/synapse.tf b/solution/DeploymentV2/terraform_layer2/synapse.tf index 0f5c6cc7..fa176482 100644 --- a/solution/DeploymentV2/terraform_layer2/synapse.tf +++ b/solution/DeploymentV2/terraform_layer2/synapse.tf @@ -121,7 +121,7 @@ resource "azurerm_synapse_spark_pool" "synapse_spark_pool" { # Synapse Workspace Firewall Rules (Allow Public Access) # -------------------------------------------------------------------------------------------------------------------- resource "azurerm_synapse_firewall_rule" "cicd" { - count = var.deploy_adls && var.deploy_synapse ? 1 : 0 + count = ((var.deploy_adls && var.deploy_synapse) && (var.is_vnet_isolated == false || var.delay_private_access)) ? 1 : 0 name = "CICDAgent" synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id start_ip_address = var.ip_address @@ -129,7 +129,7 @@ resource "azurerm_synapse_firewall_rule" "cicd" { } resource "azurerm_synapse_firewall_rule" "cicd_user" { - count = var.deploy_adls && var.deploy_synapse ? 1 : 0 + count = ((var.deploy_adls && var.deploy_synapse) && (var.is_vnet_isolated == false || var.delay_private_access)) ? 
1 : 0 name = "CICDUser" synapse_workspace_id = azurerm_synapse_workspace.synapse[0].id start_ip_address = var.ip_address2 diff --git a/solution/DeploymentV2/terraform_layer3/layer2.tf b/solution/DeploymentV2/terraform_layer3/layer2.tf index b93ecd9e..9d95a04d 100644 --- a/solution/DeploymentV2/terraform_layer3/layer2.tf +++ b/solution/DeploymentV2/terraform_layer3/layer2.tf @@ -7,7 +7,7 @@ config = { container_name = "tstate" key = "terraform_layer2.tfstate" - resource_group_name = "gfh5" - storage_account_name = "gfh5state" + resource_group_name = "gft6" + storage_account_name = "gf6state" } } diff --git a/solution/DeploymentV2/terraform_layer3/purview.tf b/solution/DeploymentV2/terraform_layer3/purview.tf index 0de62a67..b1476bb3 100644 --- a/solution/DeploymentV2/terraform_layer3/purview.tf +++ b/solution/DeploymentV2/terraform_layer3/purview.tf @@ -66,7 +66,7 @@ module "purview_ingestion_private_endpoints" { source = "./modules/purview_ingestion_private_endpoints" count = var.is_vnet_isolated && var.deploy_purview ? 
1 : 0 resource_group_name = var.resource_group_name - purview_account_name = data.terraform_remote_state.layer2.outputs.azurerm_purview_account_purview_name + purview_account_name = data.terraform_remote_state.layer2.outputs.purview_name resource_location = var.resource_location queue_privatelink_name = "${local.purview_name}-queue-plink" storage_privatelink_name = "${local.purview_name}-storage-plink" diff --git a/solution/DeploymentV2/utilities/ConnectToCICDAgent.sh b/solution/DeploymentV2/utilities/ConnectToCICDAgent.sh new file mode 100644 index 00000000..01f9f4b0 --- /dev/null +++ b/solution/DeploymentV2/utilities/ConnectToCICDAgent.sh @@ -0,0 +1 @@ +az network bastion tunnel --name ads-stg-snap-ads-gjsg --resource-group gft6 --target-resource-id "/subscriptions/035a1364-f00d-48e2-b582-4fe125905ee3/resourceGroups/gft6/providers/Microsoft.Compute/virtualMachines/CICDAgent" --resource-port "22" --port "22" \ No newline at end of file diff --git a/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh b/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh index 89a637a3..e65bf4de 100644 --- a/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh +++ b/solution/DeploymentV2/utilities/GitHubRunnerInstall.sh @@ -4,6 +4,10 @@ wget -q https://github.com/PowerShell/PowerShell/releases/download/v7.2.5/powers sudo dpkg -i powershell-lts_7.2.5-1.deb_amd64.deb && \ rm ./powershell-lts_7.2.5-1.deb_amd64.deb && \ sudo apt install -y dotnet-sdk-6.0 && \ + +#May need to do below on ubuntu 20.04 +sudo apt install -y aspnetcore-runtime-6.0=6.0.8-1 dotnet-apphost-pack-6.0=6.0.8-1 dotnet-host=6.0.8-1 dotnet-hostfxr-6.0=6.0.8-1 dotnet-runtime-6.0=6.0.8-1 dotnet-sdk-6.0=6.0.400-1 dotnet-targeting-pack-6.0=6.0.8-1 + wget https://github.com/google/go-jsonnet/releases/download/v0.17.0/jsonnet-go_0.17.0_linux_amd64.deb && \ sudo dpkg -i jsonnet-go_0.17.0_linux_amd64.deb && \ sudo rm jsonnet-go_0.17.0_linux_amd64.deb && \ From fb24378d6619726bcf83bcd5b58edca57578e028 Mon Sep 17 
00:00:00 2001 From: John Rampono Date: Mon, 22 Aug 2022 10:55:42 +0800 Subject: [PATCH 145/151] Testing for v2.0.0 release --- solution/DeploymentV2/0 | 1 + solution/DeploymentV2/Deploy.ps1 | 5 + solution/DeploymentV2/Prepare.ps1 | 59 ++++---- .../featuretemplates/full_deployment.jsonc | 13 +- .../tghcltemplates/GenerateTgHCLS.ps1 | 7 +- .../environments/tghcltemplates/layer1.hcl | 19 +++ .../vars/admz/common_vars_values.jsonc | 4 +- .../environments/vars/common_vars_schema.json | 73 +++++++++- .../vars/common_vars_template.jsonnet | 66 +++++++++ .../vars/local/common_vars_values.jsonc | 6 +- .../vars/production/common_vars_values.jsonc | 8 +- .../vars/staging/common_vars_values.jsonc | 23 ++-- .../vars/uat/common_vars_values.jsonc | 6 +- .../pwshmodules/Deploy_0_Prep.psm1 | 59 ++++++-- .../terraform_layer0/00-deploy.ps1 | 35 +---- .../terraform_layer0/02-publish.ps1 | 109 --------------- .../terraform_layer0/storage_adls_state.tf | 31 +++-- .../DeploymentV2/terraform_layer0/vars.tf | 9 +- .../terraform_layer0/vars/admz/terragrunt.hcl | 130 ++++++++---------- .../vars/local/terragrunt.hcl | 32 ++++- .../vars/production/terragrunt.hcl | 114 +++++++++------ .../vars/staging/terragrunt.hcl | 10 +- .../terraform_layer0/vars/uat/terragrunt.hcl | 31 ++++- .../DeploymentV2/terraform_layer1/layer0.tf | 13 ++ .../DeploymentV2/terraform_layer1/main.tf | 2 +- .../vars/local/terragrunt.hcl | 18 +++ .../vars/production/terragrunt.hcl | 18 +++ .../vars/staging/terragrunt.hcl | 18 +++ .../terraform_layer1/vars/uat/terragrunt.hcl | 18 +++ .../DeploymentV2/terraform_layer2/layer0.tf | 4 +- .../DeploymentV2/terraform_layer2/layer1.tf | 4 +- .../DeploymentV2/terraform_layer3/layer2.tf | 4 +- 32 files changed, 599 insertions(+), 350 deletions(-) create mode 100644 solution/DeploymentV2/0 delete mode 100644 solution/DeploymentV2/terraform_layer0/02-publish.ps1 create mode 100644 solution/DeploymentV2/terraform_layer1/layer0.tf diff --git a/solution/DeploymentV2/0 
b/solution/DeploymentV2/0 new file mode 100644 index 00000000..d00491fd --- /dev/null +++ b/solution/DeploymentV2/0 @@ -0,0 +1 @@ +1 diff --git a/solution/DeploymentV2/Deploy.ps1 b/solution/DeploymentV2/Deploy.ps1 index 793904f6..f0208345 100644 --- a/solution/DeploymentV2/Deploy.ps1 +++ b/solution/DeploymentV2/Deploy.ps1 @@ -25,6 +25,11 @@ $PathToReturnTo = (Get-Location).Path $deploymentFolderPath = (Get-Location).Path +Set-Location $deploymentFolderPath +Set-Location ./terraform_layer0 +./00-deploy.ps1 + +Set-Location $deploymentFolderPath Set-Location ./terraform_layer1 ./01-deploy.ps1 diff --git a/solution/DeploymentV2/Prepare.ps1 b/solution/DeploymentV2/Prepare.ps1 index e8448176..d86877f0 100644 --- a/solution/DeploymentV2/Prepare.ps1 +++ b/solution/DeploymentV2/Prepare.ps1 @@ -22,17 +22,7 @@ # Once this script has finished, you then run Deploy.ps1 to create your environment # ------------------------------------------------------------------------------------------------------------ -Function Sleep-Progress($seconds) { - $s = 0; - Do { - $p = [math]::Round(100 - (($seconds - $s) / $seconds * 100)); - Write-Progress -Activity "Waiting..." -Status "$p% Complete:" -SecondsRemaining ($seconds - $s) -PercentComplete $p; - [System.Threading.Thread]::Sleep(1000) - $s++; - } - While($s -lt $seconds); - -} + #by default $gitDeploy will not be true, only being set by the git environment - meaning if not using a runner it will default to a standard execution. 
$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') @@ -164,33 +154,40 @@ else if([string]::IsNullOrEmpty($env:TF_VAR_resource_group_name) -eq $false) { $rg = az group create -n $env:TF_VAR_resource_group_name -l australiaeast --only-show-errors - if([string]::IsNullOrEmpty($env:TF_VAR_state_storage_account_name) -eq $false) { - Write-Host "Creating storage account" - #Public + if([string]::IsNullOrEmpty($env:TF_VAR_state_storage_account_name) -eq $false) { + $delay_private_access = $false + $layer0_state = "remote" + $deploy_state_storage_account = $false + $deploy_cicd_vm = $false $uinput = Get-SelectionFromUser -Options ('Public','Isolated', 'Private') -Prompt "Please select Network Isolation Level" if($uinput -eq "Public") { - $delay_private_access = true + $delay_private_access = $true Write-Host "Creating Public Storage" $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --allow-blob-public-access false --public-network-access Enabled --default-action Allow --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors } if($uinput -eq "Isolated") { - $delay_private_access = true - Write-Host "Creating Isolated Storage" + $delay_private_access = $true + Write-Host "Creating Isolated Storage. In this deployment mode private networking will be established but the CICD agent will open firewalls to allow targeted public internet access to facilitate deployment. 
" #Isolated $storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --allow-blob-public-access false --public-network-access Enabled --default-action Deny --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors $hiddenoutput =az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address --only-show-errors #wait for network rule - Sleep-Progress 7 + [System.Threading.Thread]::Sleep(5000) } if($uinput -eq "Private") { - $delay_private_access = false - #Private - #Now run Layer 0 terraform + + Write-Host "Configuring for fully private storage. In this deployment mode private networking will be established from the onset." + Write-Warning "This is a more complex deployment and will require appropriate connectivity to be established between deployment agent and all deployed resources." + + $delay_private_access = $false + $layer0_state = "local" + $deploy_state_storage_account = $true + $deploy_cicd_vm =$ true #$storageId = az storage account create --resource-group $env:TF_VAR_resource_group_name --name $env:TF_VAR_state_storage_account_name --sku Standard_LRS --pr --allow-blob-public-access false --public-network-access Disabled --https-only true --min-tls-version TLS1_2 --query id -o tsv --only-show-errors #$DeploymentVnet = Read-Host "Please input the name of the spoke vnet for the deployment. 
If you leave it blank it will default to 'ads-stg-vnet-ads'" #if([string]::IsNullOrEmpty($DeploymentVnet)) @@ -215,12 +212,16 @@ else #az network private-dns record-set a create -g $env:TF_VAR_resource_group_name -z "privatelink.dfs.core.windows.net" -n "$env:TF_VAR_state_storage_account_name" --ttl 10 #$output = az network private-dns record-set a add-record -g $env:TF_VAR_resource_group_name -z "privatelink.dfs.core.windows.net" -n "$env:TF_VAR_state_storage_account_name" -a $storageip } - Write-Host "Creating Role Assignment" - $userObjectId = az ad signed-in-user show --query id -o tsv --only-show-errors - $assignment = az role assignment create --role "Storage Blob Data Contributor" --assignee-object-id $userObjectId --assignee-principal-type User --only-show-errors - Write-Host "Creating State Container" - $container = az storage container create --name $CONTAINER_NAME --account-name $env:TF_VAR_state_storage_account_name --auth-mode login --only-show-errors - + + + if($uinput -ne "Private") + { + Write-Host "Creating Role Assignment" + $userObjectId = az ad signed-in-user show --query id -o tsv --only-show-errors + $assignment = az role assignment create --role "Storage Blob Data Contributor" --assignee-object-id $userObjectId --assignee-principal-type User --only-show-errors + Write-Host "Creating State Container" + $container = az storage container create --name $CONTAINER_NAME --account-name $env:TF_VAR_state_storage_account_name --auth-mode login --only-show-errors + } } } @@ -309,6 +310,10 @@ else $foundUser = $false $common_vars_values.resource_owners = @() $common_vars_values.synapse_administrators = @{} + + $common_vars_values.FeatureTemplateOverrides.layer0_state = $layer0_state + $common_vars_values.deploy_state_storage_account = $deploy_state_storage_account + $common_vars_values.deploy_cicd_vm = $deploy_cicd_vm if([string]::IsNullOrEmpty($assigneeobject) -eq $false) { diff --git 
a/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc b/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc index 1ae86276..bbac800e 100644 --- a/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc +++ b/solution/DeploymentV2/environments/featuretemplates/full_deployment.jsonc @@ -1,4 +1,5 @@ [ + //IAC Feature Toggles {"Name":"deploy_web_app","Value":true}, {"Name":"deploy_function_app","Value":true}, {"Name":"deploy_custom_terraform","Value":false}, @@ -9,6 +10,12 @@ {"Name":"deploy_synapse","Value":true}, {"Name":"deploy_metadata_database","Value":true}, {"Name":"is_vnet_isolated","Value":true}, + {"Name":"deploy_selfhostedsql","Value":false}, + + //Sample Toggles + {"Name":"publish_sif_database","Value":true}, + + //Post IAC Publish Toggles {"Name":"publish_web_app","Value":true}, {"Name":"publish_function_app","Value":true}, {"Name":"publish_sample_files","Value":true}, @@ -16,8 +23,10 @@ {"Name":"configure_networking","Value":true}, {"Name":"publish_datafactory_pipelines","Value":true}, {"Name":"publish_web_app_addcurrentuserasadmin","Value":true}, - {"Name":"deploy_selfhostedsql","Value":false}, + + //Deployment Lifecycle Toggles {"Name":"is_onprem_datafactory_ir_registered","Value":false}, - {"Name":"publish_sif_database","Value":true}, + {"Name":"delay_private_access","Value":true}, + {"Name":"layer0_state","Value":"remote"}, {"Name":"deployment_layer3_complete","Value":false} ] \ No newline at end of file diff --git a/solution/DeploymentV2/environments/tghcltemplates/GenerateTgHCLS.ps1 b/solution/DeploymentV2/environments/tghcltemplates/GenerateTgHCLS.ps1 index 43c2c7e9..dfcf11fb 100644 --- a/solution/DeploymentV2/environments/tghcltemplates/GenerateTgHCLS.ps1 +++ b/solution/DeploymentV2/environments/tghcltemplates/GenerateTgHCLS.ps1 @@ -7,7 +7,8 @@ function GenerateTgHCLS ( [String]$PathToReturnTo = "" ) { - $layers = @(0,1,2,3) + #$layers = @(0,1,2,3) + $layers = @(0) $envlist = 
(Get-ChildItem -Directory -Path ./../vars | Select-Object -Property Name).Name foreach ($l in $layers) @@ -19,7 +20,9 @@ function GenerateTgHCLS ( $inputs_t = (Get-Content "inputs.hcl" -Raw) $template = $layer_t + $inputs_t $template = $template.Replace("<@environment>", $e) - $template + $tgt_path = Convert-Path ("../../terraform_layer$($l.ToString())/vars/$e/") + Set-Content -Path ($tgt_path+"terragrunt.hcl") -Value $template + } } } \ No newline at end of file diff --git a/solution/DeploymentV2/environments/tghcltemplates/layer1.hcl b/solution/DeploymentV2/environments/tghcltemplates/layer1.hcl index 0aa7a8fd..01d23303 100644 --- a/solution/DeploymentV2/environments/tghcltemplates/layer1.hcl +++ b/solution/DeploymentV2/environments/tghcltemplates/layer1.hcl @@ -2,6 +2,25 @@ locals { common_vars = jsondecode(file("../../../bin/environments/<@environment>/common_vars_for_hcl.json")) } +generate "layer0.tf" { + path = "layer0.tf" + if_exists = "overwrite_terragrunt" + contents = < 0) + { + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address --only-show-errors + } + + #DataLake + $resourcecheck = ((az storage account list --resource-group $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10) | Where-Object {$_.name -eq $env:datalakeName}).count + if($resourcecheck > 0) { + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address --only-show-errors + } + + #Key Vault + $resourcecheck = ( (az keyvault list --resource-group gft7 | convertfrom-json -depth 10) | Where-Object {$_.name -eq $env:keyVaultName}).count + if($resourcecheck > 0) { + $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 --only-show-errors + } + + 
#Synapse + $resourcecheck = ( (az synapse workspace list --resource-group gft7 | convertfrom-json -depth 10) | Where-Object {$_.name -eq $env:ARM_SYNAPSE_WORKSPACE_NAME}).count + if($resourcecheck > 0) { + $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME --only-show-errors + } + } catch { Write-Warning 'Opening Firewalls for IP Address One Failed' @@ -104,11 +124,30 @@ function PrepareDeployment ( if ($env:TF_VAR_ip_address2 -ne "") { try { - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 --only-show-errors - $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 --only-show-errors - $hiddenoutput = az synapse workspace firewall-rule create --name CICDUser --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME --only-show-errors - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address2 --only-show-errors - $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 --only-show-errors + #state + $resourcecheck = ((az storage account list --resource-group $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10) | Where-Object {$_.name -eq $env:TF_VAR_state_storage_account_name}).count + if($resourcecheck > 0) + { + $hiddenoutput = az storage account network-rule 
add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 --only-show-errors + } + + #DataLake + $resourcecheck = ((az storage account list --resource-group $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10) | Where-Object {$_.name -eq $env:datalakeName}).count + if($resourcecheck > 0) { + $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address2 --only-show-errors + } + + #Key Vault + $resourcecheck = ( (az keyvault list --resource-group gft7 | convertfrom-json -depth 10) | Where-Object {$_.name -eq $env:keyVaultName}).count + if($resourcecheck > 0) { + $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 --only-show-errors + } + + #Synapse + $resourcecheck = ( (az synapse workspace list --resource-group gft7 | convertfrom-json -depth 10) | Where-Object {$_.name -eq $env:ARM_SYNAPSE_WORKSPACE_NAME}).count + if($resourcecheck > 0) { + $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME --only-show-errors + } } catch { Write-Warning 'Opening Firewalls for IP Address Two Failed' diff --git a/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 b/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 index e42197d1..0f6d0982 100644 --- a/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 +++ b/solution/DeploymentV2/terraform_layer0/00-deploy.ps1 @@ -44,42 +44,21 @@ PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderP # Main Terraform - Layer1 
#------------------------------------------------------------------------------------------------------------ Write-Host "Starting Terraform Deployment- Layer 0" +Write-Host "Note that the first time this runs it will take around 10 minutes to complete." if([string]::IsNullOrEmpty($env:TF_VAR_jumphost_password) -and ($gitDeploy -eq $false -or $null -eq $gitdeploy)) { $env:TF_VAR_jumphost_password = Read-Host "Enter the Jumphost Password" } +if([string]::IsNullOrEmpty($env:TF_VAR_synapse_sql_password) -and ($gitDeploy -eq $false -or $null -eq $gitdeploy)) +{ + $env:TF_VAR_synapse_sql_password = Read-Host "Enter the Synapse SQL Admin Password" +} + + $output = terragrunt init --terragrunt-config vars/$env:environmentName/terragrunt.hcl -reconfigure $output = terragrunt apply -auto-approve --terragrunt-config vars/$env:environmentName/terragrunt.hcl -json #-var synapse_sql_password=$env:TF_VAR_synapse_sql_password ProcessTerraformApply -output $output -gitDeploy $gitDeploy - - - -#Update Values for variables in Environment -#[Environment]::SetEnvironmentVariable("TF_VAR_state_storage_account_name", $Value) -$tout_raw = ((az storage blob download -c "tstate" -n "terraform_layer2.tfstate" --account-name $env:TF_VAR_state_storage_account_name --auth-mode login) | ConvertFrom-Json).outputs - - -#conditional -if(-not (([string]::IsNullOrEmpty($tout_raw.adlsstorage_name.value)) -or ([string]::IsNullOrEmpty($tout_raw.keyvault_name.value)) -or([string]::IsNullOrEmpty($tout_raw.synapse_workspace_name.value)) ) ) -{ - Write-Host "Writing ARM_DATALAKE_NAME / ARM_KEYVAULT_NAME / ARM_SYNAPSE_WORKSPACE_NAME to common vars environment file" - $envFolderPath = Convert-Path -Path ($deploymentFolderPath + "./environments/vars/$env:environmentName/") - $varsfile = $envFolderPath + "/common_vars_values.jsonc" - $common_vars_values = Get-Content $varsfile | ConvertFrom-Json -Depth 10 - $common_vars_values.ARM_DATALAKE_NAME = $tout_raw.adlsstorage_name.value - 
$common_vars_values.ARM_KEYVAULT_NAME = $tout_raw.keyvault_name.value - $common_vars_values.ARM_SYNAPSE_WORKSPACE_NAME = $tout_raw.synapse_workspace_name.value - $common_vars_values | Convertto-Json -Depth 10 | Set-Content $varsfile -} -else -{ - Write-Host "Not writing ARM_DATALAKE_NAME / ARM_KEYVAULT_NAME / ARM_SYNAPSE_WORKSPACE_NAME to common vars environment file" - Write-Host "ARM_DATALAKE_NAME =" $tout_raw.adlsstorage_name.value - Write-Host "ARM_KEYVAULT_NAME =" $tout_raw.keyvault_name.value - Write-Host "ARM_SYNAPSE_WORKSPACE_NAME =" $tout_raw.synapse_workspace_name.value -} - diff --git a/solution/DeploymentV2/terraform_layer0/02-publish.ps1 b/solution/DeploymentV2/terraform_layer0/02-publish.ps1 deleted file mode 100644 index 68cf6307..00000000 --- a/solution/DeploymentV2/terraform_layer0/02-publish.ps1 +++ /dev/null @@ -1,109 +0,0 @@ -#---------------------------------------------------------------------------------------------------------------- -# You must be logged into the Azure CLI to run this script -#---------------------------------------------------------------------------------------------------------------- -# This script will: -# - Deploy the required AAD objects (Application Registrations etc) -# -# This is intended for creating a once off deployment from your development machine. You should setup the -# GitHub actions for your long term prod/non-prod environments -# -# Intructions -# - Ensure that you have run the Prepare.ps1 script first. This will prepare your azure subscription for deployment -# - Ensure that you have run az login and az account set -# - Ensure you have Contributor Access to the subscription you are deploying to. -# - Ensure you have Application.ReadWrite.OwnedBy on the Azure AD. -# - Run this script -# -# You can run this script multiple times if needed. 
-# -#---------------------------------------------------------------------------------------------------------------- -param ( - [Parameter(Mandatory=$false)] - [string]$FeatureTemplate="" -) - - - -#------------------------------------------------------------------------------------------------------------ -# Module Imports #Mandatory -#------------------------------------------------------------------------------------------------------------ -import-Module ./../pwshmodules/GatherOutputsFromTerraform.psm1 -force -import-Module ./../pwshmodules/Deploy_0_Prep.psm1 -force -#------------------------------------------------------------------------------------------------------------ -# Preparation #Mandatory -#------------------------------------------------------------------------------------------------------------ -$PathToReturnTo = (Get-Location).Path -$deploymentFolderPath = Convert-Path -Path ((Get-Location).tostring() + './../') - -$gitDeploy = ([System.Environment]::GetEnvironmentVariable('gitDeploy') -eq 'true') -$skipTerraformDeployment = ([System.Environment]::GetEnvironmentVariable('skipTerraformDeployment') -eq 'true') -$ipaddress = $env:TF_VAR_ip_address -$ipaddress2 = $env:TF_VAR_ip_address2 - -PrepareDeployment -gitDeploy $gitDeploy -deploymentFolderPath $deploymentFolderPath -FeatureTemplate $FeatureTemplate -PathToReturnTo $PathToReturnTo - -if($env:TF_VAR_deployment_layer3_complete -eq $false -or $null -eq $env:TF_VAR_deployment_layer3_complete) -{ - Write-Error "Layer 3 Deployment is not complete. Code will now exit. Run terraform layer 3 for this deployment before running this layer (layer two) again." 
- exit -} - -#------------------------------------------------------------------------------------------------------------ -# Get Outputs #Mandatory -#------------------------------------------------------------------------------------------------------------ -$tout = GatherOutputsFromTerraform -TerraformFolderPath $PathToReturnTo - -#------------------------------------------------------------------------------------------------------------ -# Publish -#------------------------------------------------------------------------------------------------------------ -import-Module ./../pwshmodules/Deploy_4_PrivateLinks.psm1 -force -DeployPrivateLinks -tout $tout - -import-Module ./../pwshmodules/Deploy_5_WebApp.psm1 -force -DeployWebApp -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo - -import-Module ./../pwshmodules/Deploy_6_FuncApp.psm1 -force -DeployFuncApp -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo - -import-Module ./../pwshmodules/Deploy_7_MetadataDB.psm1 -force -DeployMataDataDB -publish_metadata_database $true -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo - -import-Module ./../pwshmodules/Deploy_9_DataFactory.psm1 -force -DeployDataFactoryAndSynapseArtefacts -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo - -import-Module ./../pwshmodules/Deploy_10_SampleFiles.psm1 -force -DeploySampleFiles -tout $tout -deploymentFolderPath $deploymentFolderPath -PathToReturnTo $PathToReturnTo - -#import-Module ./../pwshmodules/ConfigureAzurePurview.psm1 -force -#ConfigureAzurePurview -tout $tout - - -#---------------------------------------------------------------------------------------------------------------- -# Set up Purview -#---------------------------------------------------------------------------------------------------------------- -# This is a WIP - not recommended to use for standard user 
-#---------------------------------------------------------------------------------------------------------------- -# -if($skipConfigurePurview -or $null -eq $skipConfigurePurview) { - Write-Host "Skipping experimental Purview Configuration" -} -else { - Write-Host "Running Purview Configuration (experimental) Script" - Set-Location $deploymentFolderPath - Invoke-Expression ./ConfigureAzurePurview.ps1 -} - - -#---------------------------------------------------------------------------------------------------------------- -# Deploy Functional Tests -#---------------------------------------------------------------------------------------------------------------- -# This is for development purposes primarily - If using, understand these may not be all working with most recent platform version as tests can become outdated / missing new required fields. -#---------------------------------------------------------------------------------------------------------------- -if($skipFunctionalTests -or $null -eq $skipFunctionalTests) { - Write-Host "Skipping Functional Tests Upload" -} -else { - Write-Host "Deploying Functional Tests to Web App" - Set-Location $deploymentFolderPath - Invoke-Expression ./GenerateAndUploadFunctionalTests.ps1 -} \ No newline at end of file diff --git a/solution/DeploymentV2/terraform_layer0/storage_adls_state.tf b/solution/DeploymentV2/terraform_layer0/storage_adls_state.tf index ba9a987a..f7f09589 100644 --- a/solution/DeploymentV2/terraform_layer0/storage_adls_state.tf +++ b/solution/DeploymentV2/terraform_layer0/storage_adls_state.tf @@ -1,12 +1,12 @@ locals { - stateaccountname = "gf6state" + adls_state_deployment_agents_ros = var.deploy_state_storage_account ? var.resource_owners : [] } #note: Swapped from native tf due to https://github.com/hashicorp/terraform-provider-azurerm/issues/16335 resource "azapi_resource" "adls_state" { count = var.deploy_state_storage_account ? 
1 : 0 type = "Microsoft.Storage/storageAccounts@2022-05-01" - name = local.stateaccountname + name = var.state_storage_account_name parent_id = "/subscriptions/${var.subscription_id}/resourceGroups/${var.resource_group_name}" location = var.resource_location @@ -18,7 +18,7 @@ resource "azapi_resource" "adls_state" { properties = { isHnsEnabled = true, minimumTlsVersion = "TLS1_2" - publicNetworkAccess = var.is_vnet_isolated ? "Disabled" : "Enabled" + publicNetworkAccess = (var.is_vnet_isolated == false || var.delay_private_access == true) ? "Enabled" : "Disabled" networkAcls = { defaultAction = "Deny", bypass = "AzureServices,Metrics" @@ -28,9 +28,16 @@ resource "azapi_resource" "adls_state" { }) } +resource "azurerm_storage_container" "tstate" { + count = var.deploy_state_storage_account ? 1 : 0 + name = "tstate" + storage_account_name = var.state_storage_account_name + container_access_type = "private" +} + resource "azurerm_role_assignment" "adls_state_deployment_agents" { for_each = { - for ro in var.resource_owners : ro => ro + for ro in local.adls_state_deployment_agents_ros : ro => ro } scope = azapi_resource.adls_state[0].id role_definition_name = "Storage Blob Data Contributor" @@ -41,15 +48,15 @@ resource "azurerm_role_assignment" "adls_state_deployment_agents" { } resource "azurerm_private_endpoint" "adls_state_storage_private_endpoint_with_dns" { - count = var.deploy_adls && var.is_vnet_isolated ? 1 : 0 - name = "${local.stateaccountname}-blob-plink" + count = var.deploy_state_storage_account ? 
1 : 0 + name = "${var.state_storage_account_name}-blob-plink" location = var.resource_location resource_group_name = var.resource_group_name subnet_id = local.plink_subnet_id private_service_connection { - name = "${local.stateaccountname}-blob-plink-conn" - private_connection_resource_id = azapi_resource.adls_state.id + name = "${var.state_storage_account_name}-blob-plink-conn" + private_connection_resource_id = azapi_resource.adls_state[0].id is_manual_connection = false subresource_names = ["blob"] } @@ -72,15 +79,15 @@ resource "azurerm_private_endpoint" "adls_state_storage_private_endpoint_with_dn } resource "azurerm_private_endpoint" "adls_dfs_state_storage_private_endpoint_with_dns" { - count = var.deploy_adls && var.is_vnet_isolated ? 1 : 0 - name = "${local.stateaccountname}-dfs-plink" + count = var.deploy_state_storage_account ? 1 : 0 + name = "${var.state_storage_account_name}-dfs-plink" location = var.resource_location resource_group_name = var.resource_group_name subnet_id = local.plink_subnet_id private_service_connection { - name = "${local.stateaccountname}-dfs-plink-conn" - private_connection_resource_id = azapi_resource.adls_state.id + name = "${var.state_storage_account_name}-dfs-plink-conn" + private_connection_resource_id = azapi_resource.adls_state[0].id is_manual_connection = false subresource_names = ["dfs"] } diff --git a/solution/DeploymentV2/terraform_layer0/vars.tf b/solution/DeploymentV2/terraform_layer0/vars.tf index 7bebefb6..5e97cb34 100644 --- a/solution/DeploymentV2/terraform_layer0/vars.tf +++ b/solution/DeploymentV2/terraform_layer0/vars.tf @@ -488,6 +488,11 @@ variable "blob_storage_account_name" { default = "" type = string } +variable "state_storage_account_name" { + description = "The override name for the storage account used for storing state data. If empty, will be autogenerated." + default = "" + type = string +} variable "bastion_name" { description = "The override name for the Bastion service. 
If empty, will be autogenerated based on prefix settings" default = "" @@ -906,13 +911,13 @@ variable "resource_owners" { /*Toggles for Layer0 */ variable "deploy_cicd_vm" { description = "Set to true if you want to deploy a vm to host the github runnner." - default = true + default = false type = bool } variable "deploy_state_storage_account" { description = "Set to true if you want to deploy a the state storage account." - default = true + default = false type = bool } diff --git a/solution/DeploymentV2/terraform_layer0/vars/admz/terragrunt.hcl b/solution/DeploymentV2/terraform_layer0/vars/admz/terragrunt.hcl index dd5ec60b..8f9bd409 100644 --- a/solution/DeploymentV2/terraform_layer0/vars/admz/terragrunt.hcl +++ b/solution/DeploymentV2/terraform_layer0/vars/admz/terragrunt.hcl @@ -1,83 +1,69 @@ -remote_state { - backend = "azurerm" - generate = { - path = "backend.tf" - if_exists = "overwrite_terragrunt" - } - config = { - # You need to update the resource group and storage account here. - # You should have created these with the Prepare.ps1 script. 
- resource_group_name = "dlzdev08lite" - storage_account_name = "teststatedev08litestate" - container_name = "tstate" - key = "terraform.tfstate" - } +locals { + common_vars = jsondecode(file("../../../bin/environments/admz/common_vars_for_hcl.json")) } -# These inputs are provided to the terraform variables when deploying the environment +/*If performing a private networking deployment Prepare.ps1 will initially set TF_VAR_layer0_state to 'local' after initial deployment this should be set to 'remote'*/ +generate "backend.tf" { + + path = "backend.tf" + if_exists = "overwrite_terragrunt" + contents = < Date: Mon, 22 Aug 2022 13:31:54 +0800 Subject: [PATCH 146/151] Update README.md --- solution/DeploymentV2/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index 8bec63fd..8a7cbcda 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -33,6 +33,8 @@ When you execute the script it will ask you for a number of inputs: ![image](https://user-images.githubusercontent.com/11702150/184566884-89671236-cbb6-441d-a6b5-f7390a44b78c.png) - **Resource Provider Registration**: Select '1' (YES) to ensure that the pre-requisite resource providers have been enabled on your Azure subscription. ![image](https://user-images.githubusercontent.com/11702150/184566915-ad311bf1-59fc-4c1d-a94c-6d51c3b82101.png) +- **Network Isolation Level**: Select the level of network isolation that you would like for your deployed resources. 'Public' enables public IP access from anywhere. 'Isolated' enables private networking components while allowing tightly controlled public IP based access for a small number of whitelisted IP addresses. This is the most common deployment method which allows a cloud hosted, dynamically provisioned CICD agent to carry out the deployment. 'Private' networking is an advanced deployment option which will block all public traffic. 
+![image](https://user-images.githubusercontent.com/11702150/185845690-e0b64a24-1322-4934-b569-c6faf8f7d153.png) - **Resource Owner**: Insert the object id of the Azure identity or group that you would like to have ownership of the resource group. If you are planning to deploy the solution using a CICD agent, it is suggested that you enter the Agent Service Principal's object id here. If you will be deploying from the command line using an interactive session then leave this field blank. - **SQL Server AAD Admin**: Insert the object id of the Azure identity or group that you would like to be the AAD administrator of any SQL Server instances deployed. If you are planning to deploy the solution using a CICD agent, then it is suggested that you use an AAD group here. If you will be deploying from the command line using an interactive session then leave this field blank. - **Press any key to continue**: The script will now evaluate your system to gather required information. A summary of that information will be presented to you (similar to the screen capture below). Review the information and press any key to continue. From a2bc8527a7cebebd612070c257fb8fe4839ad165 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 22 Aug 2022 15:18:14 +0800 Subject: [PATCH 147/151] Update README.md --- solution/DeploymentV2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index 8a7cbcda..d67a368d 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -74,7 +74,7 @@ The configuration for this environment creation is read from the following locat - The environment configuration file (*where {selected_environment} is the name of the environment that you selected during execution of prepare.ps1): - ```/azure-data-services-go-fast-codebase/solution/DeploymentV2/environment/vars/{selected_environment}/terragrunt.hcl``` - +## :green_circle: PART 3. 
Deployment Details ### Deployment Layers Layer | Description | Permissions Required when using Service Principal | Permissions Required when using User Principal | --- | --- | --- | --- | From 58b3b3940e1e183612abe729289087dfecaacf43 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 22 Aug 2022 15:36:12 +0800 Subject: [PATCH 148/151] Update README.md --- solution/DeploymentV2/README.md | 61 ++++++++++++++++++++++++++++++++- 1 file changed, 60 insertions(+), 1 deletion(-) diff --git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index d67a368d..477eb303 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -75,10 +75,69 @@ The configuration for this environment creation is read from the following locat - ```/azure-data-services-go-fast-codebase/solution/DeploymentV2/environment/vars/{selected_environment}/terragrunt.hcl``` ## :green_circle: PART 3. Deployment Details -### Deployment Layers +### Deployment Layers - Summary Layer | Description | Permissions Required when using Service Principal | Permissions Required when using User Principal | --- | --- | --- | --- | Terraform Layer Zero | - Deploys the spoke VNET with subnets, dns zones, bastion & a VM for the CICD agent | - Resouce Group Owner
- Blob Contributor on Terraform's State Storage Account | - Resouce Group Owner
- Blob Contributor on Terraform's State Storage Account Terraform Layer One | - Register AAD Enterprise Applications & Service Principals | - Application.ReadWrite.OwnedBy
- Blob Contributor on Terraform's State Storage Account| - Application Administrator (Role)
- Blob Contributor on Terraform's State Storage Account Terraform Layer Two | - Core IAC deployment for approx. 70 ADS Go fast resources | - Resource Group Owner
- Blob Contributor on Terraform's State Storage Account| - Resource Group Owner
- Blob Contributor on Terraform's State Storage Account Terraform Layer Three | - Update AAD Enterprise Applications by granting required roles and permissions to managed service identities created in Layer Two
- Create Private Endpoints for Purview | - Application.ReadWrite.OwnedBy
(Must be same identity as that which was used to run Layer One)
- Blob Contributor on Terraform's State Storage Account | - Application Administrator (Role),
- Network Contributor
- Blob Contributor on Terraform's State Storage Account + + +Provider | Resources | +| --- | --- | +azurerm_network_security_group|- app_service_nsg
- bastion_nsg
- plink_nsg
- vm_nsg +azurerm_bastion_host | bastion +azurerm_network_security_rule|app_service_in_deny_all[0] +azurerm_network_security_rule|bastion_inbound_control_plane[0] +azurerm_network_security_rule|bastion_inbound_data_plane[0] +azurerm_network_security_rule|bastion_inbound_internet[0] +azurerm_network_security_rule|bastion_inbound_load_balancer[0] +azurerm_network_security_rule|bastion_outbound_azure[0] +azurerm_network_security_rule|bastion_outbound_bastion_vms[0] +azurerm_network_security_rule|bastion_outbound_dataplane[0] +azurerm_network_security_rule|bastion_outbound_internet[0] +azurerm_network_security_rule|plink_out_deny_all[0] +azurerm_network_security_rule|vm_inbound_bastion[0] + +Layer | Description | +| --- | --- | +data.azurerm_client_config.current +azurerm_bastion_host.bastion[0] + + +azurerm_private_dns_zone.private_dns_zone_blob[0] +azurerm_private_dns_zone.private_dns_zone_db[0] +azurerm_private_dns_zone.private_dns_zone_dfs[0] +azurerm_private_dns_zone.private_dns_zone_kv[0] +azurerm_private_dns_zone.private_dns_zone_purview[0] +azurerm_private_dns_zone.private_dns_zone_purview_studio[0] +azurerm_private_dns_zone.private_dns_zone_queue[0] +azurerm_private_dns_zone.private_dns_zone_servicebus[0] +azurerm_private_dns_zone.synapse_gateway[0] +azurerm_private_dns_zone.synapse_sql[0] +azurerm_private_dns_zone.synapse_studio[0] +azurerm_private_dns_zone_virtual_network_link.blob[0] +azurerm_private_dns_zone_virtual_network_link.database[0] +azurerm_private_dns_zone_virtual_network_link.dfs[0] +azurerm_private_dns_zone_virtual_network_link.purview[0] +azurerm_private_dns_zone_virtual_network_link.purview_studio[0] +azurerm_private_dns_zone_virtual_network_link.queue[0] +azurerm_private_dns_zone_virtual_network_link.servicebus[0] +azurerm_private_dns_zone_virtual_network_link.synapse_gateway[0] +azurerm_private_dns_zone_virtual_network_link.synapse_sql[0] +azurerm_private_dns_zone_virtual_network_link.synapse_studio[0] 
+azurerm_private_dns_zone_virtual_network_link.vaultcore[0] +azurerm_public_ip.bastion_pip[0] +azurerm_subnet.app_service_subnet[0] +azurerm_subnet.bastion_subnet[0] +azurerm_subnet.plink_subnet[0] +azurerm_subnet.vm_subnet[0] +azurerm_subnet_network_security_group_association.app_service_nsg[0] +azurerm_subnet_network_security_group_association.bastion_nsg[0] +azurerm_subnet_network_security_group_association.plink_nsg[0] +azurerm_subnet_network_security_group_association.vm_nsg[0] +azurerm_virtual_network.vnet[0] +random_id.rg_deployment_unique +module.naming.random_string.first_letter +module.naming.random_string.main From 65e164df19829a195ab6ba492b9f11ac6034d6e3 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 22 Aug 2022 20:38:07 +0800 Subject: [PATCH 149/151] Added TFdocs --- solution/DeploymentV2/README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index 8bec63fd..a4125553 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -76,7 +76,7 @@ The configuration for this environment creation is read from the following locat ### Deployment Layers Layer | Description | Permissions Required when using Service Principal | Permissions Required when using User Principal | --- | --- | --- | --- | -Terraform Layer Zero | - Deploys the spoke VNET with subnets, dns zones, bastion & a VM for the CICD agent | - Resouce Group Owner
- Blob Contributor on Terraform's State Storage Account | - Resouce Group Owner
- Blob Contributor on Terraform's State Storage Account -Terraform Layer One | - Register AAD Enterprise Applications & Service Principals | - Application.ReadWrite.OwnedBy
- Blob Contributor on Terraform's State Storage Account| - Application Administrator (Role)
- Blob Contributor on Terraform's State Storage Account -Terraform Layer Two | - Core IAC deployment for approx. 70 ADS Go fast resources | - Resource Group Owner
- Blob Contributor on Terraform's State Storage Account| - Resource Group Owner
- Blob Contributor on Terraform's State Storage Account -Terraform Layer Three | - Update AAD Enterprise Applications by granting required roles and permissions to managed service identities created in Layer Two
- Create Private Endpoints for Purview | - Application.ReadWrite.OwnedBy
(Must be same identity as that which was used to run Layer One)
- Blob Contributor on Terraform's State Storage Account | - Application Administrator (Role),
- Network Contributor
- Blob Contributor on Terraform's State Storage Account +[Terraform Layer Zero](./terraform_layer0/tformdocs.md) | Deploys the spoke VNET with subnets, dns zones, bastion & a VM for the CICD agent | Resource Group Owner<br>

Blob Contributor on Terraform's State Storage Account | Resource Group Owner<br>

Blob Contributor on Terraform's State Storage Account +[Terraform Layer One](./terraform_layer1/tformdocs.md)| Register AAD Enterprise Applications & Service Principals | Application.ReadWrite.OwnedBy

Blob Contributor on Terraform's State Storage Account| Application Administrator (Role)

Blob Contributor on Terraform's State Storage Account +Terraform Layer Two | Core IAC deployment for approx. 70 ADS Go fast resources | Resource Group Owner

Blob Contributor on Terraform's State Storage Account| Resource Group Owner

Blob Contributor on Terraform's State Storage Account +Terraform Layer Three | Update AAD Enterprise Applications by granting required roles and permissions to managed service identities created in Layer Two

Create Private Endpoints for Purview | Application.ReadWrite.OwnedBy
(Must be same identity as that which was used to run Layer One)

Blob Contributor on Terraform's State Storage Account | Application Administrator (Role),

Network Contributor

Blob Contributor on Terraform's State Storage Account From d99c056c410b74418d9bc5be231d3bdc18b680ce Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 22 Aug 2022 20:38:24 +0800 Subject: [PATCH 150/151] Added TfDcos --- solution/DeploymentV2/0 | 1 - .../vars/common_vars_template.jsonnet | 24 ++ .../vars/staging/common_vars_values.jsonc | 5 +- .../pwshmodules/Deploy_0_Prep.psm1 | 16 +- .../terraform_layer0/tformdocs.md | 266 ++++++++++++ .../terraform_layer1/tformdocs.md | 91 ++++ .../terraform_layer2/tformdocs.md | 400 ++++++++++++++++++ .../terraform_layer3/tformdocs.md | 89 ++++ .../utilities/GenerateTerraformDocs.ps1 | 35 ++ 9 files changed, 916 insertions(+), 11 deletions(-) delete mode 100644 solution/DeploymentV2/0 create mode 100644 solution/DeploymentV2/terraform_layer0/tformdocs.md create mode 100644 solution/DeploymentV2/terraform_layer1/tformdocs.md create mode 100644 solution/DeploymentV2/terraform_layer2/tformdocs.md create mode 100644 solution/DeploymentV2/terraform_layer3/tformdocs.md create mode 100644 solution/DeploymentV2/utilities/GenerateTerraformDocs.ps1 diff --git a/solution/DeploymentV2/0 b/solution/DeploymentV2/0 deleted file mode 100644 index d00491fd..00000000 --- a/solution/DeploymentV2/0 +++ /dev/null @@ -1 +0,0 @@ -1 diff --git a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet index e6c4e03c..31a8afe4 100644 --- a/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet +++ b/solution/DeploymentV2/environments/vars/common_vars_template.jsonnet @@ -744,6 +744,30 @@ local SecretFileSensitiveVars = { // Object comprehension. 
+ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc index 97c3d62e..abf184ae 100644 --- a/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc +++ b/solution/DeploymentV2/environments/vars/staging/common_vars_values.jsonc @@ -44,7 +44,7 @@ "deploy_state_storage_account": false, "deploy_cicd_vm": false, "FeatureTemplateOverrides": { - "is_onprem_datafactory_ir_registered": false, + "is_onprem_datafactory_ir_registered": true, "deployment_layer3_complete": true, "layer0_state": "remote", "delay_private_access": true, @@ -55,6 +55,7 @@ "synapse_git_repository_owner": "h-sha", "synapse_git_repository_name": "testLockbox", "synapse_git_repository_root_folder": "/Synapse", - "synapse_git_use_pat": false + "synapse_git_use_pat": false, + "deploy_selfhostedsql": true } } diff --git a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 index 846ee7ef..875a44a0 100644 --- a/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 +++ b/solution/DeploymentV2/pwshmodules/Deploy_0_Prep.psm1 @@ -92,26 +92,26 @@ function PrepareDeployment ( try { #state $resourcecheck = ((az storage account list --resource-group $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10) | Where-Object {$_.name -eq $env:TF_VAR_state_storage_account_name}).count - if($resourcecheck > 0) + if($resourcecheck -gt 0) { $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address --only-show-errors } #DataLake $resourcecheck = ((az storage account list --resource-group $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10) | Where-Object {$_.name -eq $env:datalakeName}).count - if($resourcecheck > 0) { + if($resourcecheck -gt 0) { 
$hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address $env:TF_VAR_ip_address --only-show-errors } #Key Vault $resourcecheck = ( (az keyvault list --resource-group gft7 | convertfrom-json -depth 10) | Where-Object {$_.name -eq $env:keyVaultName}).count - if($resourcecheck > 0) { + if($resourcecheck -gt 0) { $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address/32 --only-show-errors } #Synapse $resourcecheck = ( (az synapse workspace list --resource-group gft7 | convertfrom-json -depth 10) | Where-Object {$_.name -eq $env:ARM_SYNAPSE_WORKSPACE_NAME}).count - if($resourcecheck > 0) { + if($resourcecheck -gt 0) { $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address --end-ip-address $env:TF_VAR_ip_address --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME --only-show-errors } @@ -126,26 +126,26 @@ function PrepareDeployment ( try { #state $resourcecheck = ((az storage account list --resource-group $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10) | Where-Object {$_.name -eq $env:TF_VAR_state_storage_account_name}).count - if($resourcecheck > 0) + if($resourcecheck -gt 0) { $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:TF_VAR_state_storage_account_name --ip-address $env:TF_VAR_ip_address2 --only-show-errors } #DataLake $resourcecheck = ((az storage account list --resource-group $env:TF_VAR_resource_group_name | ConvertFrom-Json -Depth 10) | Where-Object {$_.name -eq $env:datalakeName}).count - if($resourcecheck > 0) { + if($resourcecheck -gt 0) { $hiddenoutput = az storage account network-rule add --resource-group $env:TF_VAR_resource_group_name --account-name $env:datalakeName --ip-address 
$env:TF_VAR_ip_address2 --only-show-errors } #Key Vault $resourcecheck = ( (az keyvault list --resource-group gft7 | convertfrom-json -depth 10) | Where-Object {$_.name -eq $env:keyVaultName}).count - if($resourcecheck > 0) { + if($resourcecheck -gt 0) { $hiddenoutput = az keyvault network-rule add -g $env:TF_VAR_resource_group_name --name $env:keyVaultName --ip-address $env:TF_VAR_ip_address2/32 --only-show-errors } #Synapse $resourcecheck = ( (az synapse workspace list --resource-group gft7 | convertfrom-json -depth 10) | Where-Object {$_.name -eq $env:ARM_SYNAPSE_WORKSPACE_NAME}).count - if($resourcecheck > 0) { + if($resourcecheck -gt 0) { $hiddenoutput = az synapse workspace firewall-rule create --name CICDAgent --resource-group $env:TF_VAR_resource_group_name --start-ip-address $env:TF_VAR_ip_address2 --end-ip-address $env:TF_VAR_ip_address2 --workspace-name $env:ARM_SYNAPSE_WORKSPACE_NAME --only-show-errors } } diff --git a/solution/DeploymentV2/terraform_layer0/tformdocs.md b/solution/DeploymentV2/terraform_layer0/tformdocs.md new file mode 100644 index 00000000..8eac6fbf --- /dev/null +++ b/solution/DeploymentV2/terraform_layer0/tformdocs.md @@ -0,0 +1,266 @@ +## Requirements + +| Name | Version | +|------|---------| +| [azuread](#requirement\_azuread) | =2.22.0 | +| [azurerm](#requirement\_azurerm) | =3.12.0 | +| [random](#requirement\_random) | =3.3.0 | + +## Providers + +| Name | Version | +|------|---------| +| [azapi](#provider\_azapi) | 0.5.0 | +| [azurerm](#provider\_azurerm) | 3.12.0 | +| [random](#provider\_random) | 3.3.0 | + +## Modules + +| Name | Source | Version | +|------|--------|---------| +| [naming](#module\_naming) | Azure/naming/azurerm | 0.1.1 | + +## Resources + +| Name | Type | +|------|------| +| [azapi_resource.adls_state](https://registry.terraform.io/providers/Azure/azapi/latest/docs/resources/resource) | resource | +| 
[azurerm_bastion_host.bastion](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/bastion_host) | resource | +| [azurerm_network_interface.cicd_vm_nic](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_interface) | resource | +| [azurerm_network_security_group.app_service_nsg](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_group) | resource | +| [azurerm_network_security_group.bastion_nsg](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_group) | resource | +| [azurerm_network_security_group.plink_nsg](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_group) | resource | +| [azurerm_network_security_group.vm_nsg](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_group) | resource | +| [azurerm_network_security_rule.app_service_in_deny_all](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| [azurerm_network_security_rule.bastion_inbound_control_plane](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| [azurerm_network_security_rule.bastion_inbound_data_plane](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| [azurerm_network_security_rule.bastion_inbound_internet](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| [azurerm_network_security_rule.bastion_inbound_load_balancer](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| 
[azurerm_network_security_rule.bastion_outbound_azure](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| [azurerm_network_security_rule.bastion_outbound_bastion_vms](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| [azurerm_network_security_rule.bastion_outbound_dataplane](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| [azurerm_network_security_rule.bastion_outbound_internet](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| [azurerm_network_security_rule.plink_out_deny_all](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| [azurerm_network_security_rule.vm_inbound_bastion](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_security_rule) | resource | +| [azurerm_private_dns_zone.private_dns_zone_blob](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| [azurerm_private_dns_zone.private_dns_zone_db](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| [azurerm_private_dns_zone.private_dns_zone_dfs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| [azurerm_private_dns_zone.private_dns_zone_kv](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| [azurerm_private_dns_zone.private_dns_zone_purview](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| 
[azurerm_private_dns_zone.private_dns_zone_purview_studio](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| [azurerm_private_dns_zone.private_dns_zone_queue](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| [azurerm_private_dns_zone.private_dns_zone_servicebus](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| [azurerm_private_dns_zone.synapse_gateway](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| [azurerm_private_dns_zone.synapse_sql](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| [azurerm_private_dns_zone.synapse_studio](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone) | resource | +| [azurerm_private_dns_zone_virtual_network_link.blob](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| [azurerm_private_dns_zone_virtual_network_link.database](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| [azurerm_private_dns_zone_virtual_network_link.dfs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| [azurerm_private_dns_zone_virtual_network_link.purview](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| [azurerm_private_dns_zone_virtual_network_link.purview_studio](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| 
[azurerm_private_dns_zone_virtual_network_link.queue](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| [azurerm_private_dns_zone_virtual_network_link.servicebus](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| [azurerm_private_dns_zone_virtual_network_link.synapse_gateway](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| [azurerm_private_dns_zone_virtual_network_link.synapse_sql](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| [azurerm_private_dns_zone_virtual_network_link.synapse_studio](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| [azurerm_private_dns_zone_virtual_network_link.vaultcore](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_dns_zone_virtual_network_link) | resource | +| [azurerm_private_endpoint.adls_dfs_state_storage_private_endpoint_with_dns](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_private_endpoint.adls_state_storage_private_endpoint_with_dns](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_public_ip.bastion_pip](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/public_ip) | resource | +| [azurerm_role_assignment.adls_state_deployment_agents](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_storage_container.tstate](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/storage_container) | resource | 
+| [azurerm_subnet.app_service_subnet](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/subnet) | resource | +| [azurerm_subnet.bastion_subnet](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/subnet) | resource | +| [azurerm_subnet.plink_subnet](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/subnet) | resource | +| [azurerm_subnet.vm_subnet](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/subnet) | resource | +| [azurerm_subnet_network_security_group_association.app_service_nsg](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/subnet_network_security_group_association) | resource | +| [azurerm_subnet_network_security_group_association.bastion_nsg](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/subnet_network_security_group_association) | resource | +| [azurerm_subnet_network_security_group_association.plink_nsg](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/subnet_network_security_group_association) | resource | +| [azurerm_subnet_network_security_group_association.vm_nsg](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/subnet_network_security_group_association) | resource | +| [azurerm_virtual_machine.cicd_vm_linux](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/virtual_machine) | resource | +| [azurerm_virtual_network.vnet](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/virtual_network) | resource | +| [random_id.rg_deployment_unique](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/id) | resource | +| [azurerm_client_config.current](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/data-sources/client_config) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | 
+|------|-------------|------|---------|:--------:| +| [aad\_functionapp\_name](#input\_aad\_functionapp\_name) | The override name for the AAD App registration for the function app. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [aad\_webapp\_name](#input\_aad\_webapp\_name) | The override name for the AAD App registration for the web app. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [adf\_git\_email\_address](#input\_adf\_git\_email\_address) | The email address to be associated with the commit being done for the pipeline upload. | `string` | `""` | no | +| [adf\_git\_host\_url](#input\_adf\_git\_host\_url) | Specifies the GitHub Enterprise host name. For example: https://github.mydomain.com. Use https://github.com for open source repositories. | `string` | `"https://github.com"` | no | +| [adf\_git\_pat](#input\_adf\_git\_pat) | The personal access token used to authenticate the git account | `string` | `""` | no | +| [adf\_git\_repository\_branch\_name](#input\_adf\_git\_repository\_branch\_name) | The name of the github branch to be used | `string` | `"main"` | no | +| [adf\_git\_repository\_name](#input\_adf\_git\_repository\_name) | The name of the github repository to be used for synapse | `string` | `""` | no | +| [adf\_git\_repository\_owner](#input\_adf\_git\_repository\_owner) | The owner of the github repository to be used for adf. Eg. for the repository https://github.com/contoso/ads, the owner is contoso | `string` | `""` | no | +| [adf\_git\_repository\_root\_folder](#input\_adf\_git\_repository\_root\_folder) | The name of the root folder to be used in the branch | `string` | `"/"` | no | +| [adf\_git\_toggle\_integration](#input\_adf\_git\_toggle\_integration) | Feature toggle for enabling adf github integration | `bool` | `false` | no | +| [adf\_git\_use\_pat](#input\_adf\_git\_use\_pat) | Whether a pat is required for authentication (non public repo). 
| `bool` | `true` | no | +| [adf\_git\_user\_name](#input\_adf\_git\_user\_name) | The user name to be associated with the commit being done for the pipeline upload. | `string` | `""` | no | +| [adls\_storage\_account\_name](#input\_adls\_storage\_account\_name) | The override name for the storage account used for adls. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [allow\_public\_access\_to\_synapse\_studio](#input\_allow\_public\_access\_to\_synapse\_studio) | Should the synapse studio allow access to public IPs | `bool` | `false` | no | +| [app\_insights\_name](#input\_app\_insights\_name) | The override name for the app insights resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [app\_name](#input\_app\_name) | The app\_name suffix value to be used for autogenerated naming conventions | `string` | `"ads"` | no | +| [app\_service\_nsg\_name](#input\_app\_service\_nsg\_name) | The override name for the app service subnet NSG. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [app\_service\_plan\_name](#input\_app\_service\_plan\_name) | The override name for the app service plan resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [app\_service\_sku](#input\_app\_service\_sku) | The sku/scale of the app service |
object({
tier = string
size = string
capacity = number })
|
{
"capacity": 1,
"size": "S1",
"tier": "Standard"
}
| no | +| [app\_service\_subnet\_cidr](#input\_app\_service\_subnet\_cidr) | CIDR of the subnet used to host the app service plan | `string` | `"10.0.0.128/26"` | no | +| [app\_service\_subnet\_name](#input\_app\_service\_subnet\_name) | The override name for the app service subnet resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [author\_tag](#input\_author\_tag) | The tags to apply to resources. | `string` | `"opensource.microsoft.com"` | no | +| [azure\_purview\_data\_curators](#input\_azure\_purview\_data\_curators) | List of Azure Purview Data Curators for default root | `map(string)` | `{}` | no | +| [azure\_sql\_aad\_administrators](#input\_azure\_sql\_aad\_administrators) | List of Azure SQL Administrators | `map(string)` | `{}` | no | +| [bastion\_ip\_name](#input\_bastion\_ip\_name) | The override name for the Bastion service Public IP. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [bastion\_name](#input\_bastion\_name) | The override name for the Bastion service. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [bastion\_nsg\_name](#input\_bastion\_nsg\_name) | The override name for the bastion subnet NSG. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [bastion\_subnet\_cidr](#input\_bastion\_subnet\_cidr) | CIDR of the subnet used for bastion | `string` | `"10.0.0.64/26"` | no | +| [blob\_storage\_account\_name](#input\_blob\_storage\_account\_name) | The override name for the storage account used for staging data. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [cicd\_sp\_id](#input\_cicd\_sp\_id) | The Object Id of the GitHub Service Principal. 
This will ensure that keyvault access policies are configured for GitHub/terraform to read secret state later | `string` | `""` | no | +| [configure\_networking](#input\_configure\_networking) | Feature toggle for post IAC network configuration | `bool` | `true` | no | +| [custom\_vm\_image\_offer](#input\_custom\_vm\_image\_offer) | An Azure custom image marketplace image offer to be referenced for a custom vm image. | `string` | `""` | no | +| [custom\_vm\_image\_publisher](#input\_custom\_vm\_image\_publisher) | An Azure custom image marketplace image publisher to be referenced for a custom vm image. | `string` | `""` | no | +| [custom\_vm\_image\_sku](#input\_custom\_vm\_image\_sku) | An Azure custom image marketplace image sku to be referenced for a custom vm image. | `string` | `""` | no | +| [custom\_vm\_image\_version](#input\_custom\_vm\_image\_version) | An Azure custom image marketplace image version to be referenced for a custom vm image. | `string` | `"latest"` | no | +| [custom\_vm\_os](#input\_custom\_vm\_os) | User must define whether they wish deploy a 'windows' or 'linux' virtual machine. | `string` | `"linux"` | no | +| [custom\_vm\_plan\_name](#input\_custom\_vm\_plan\_name) | An Azure vm plan name to be referenced for a custom vm image. | `string` | `""` | no | +| [custom\_vm\_plan\_product](#input\_custom\_vm\_plan\_product) | An Azure vm plan product to be referenced for a custom vm image. | `string` | `""` | no | +| [custom\_vm\_plan\_publisher](#input\_custom\_vm\_plan\_publisher) | An Azure vm plan publisher to be referenced for a custom vm image. | `string` | `""` | no | +| [data\_factory\_name](#input\_data\_factory\_name) | The override name for the Data Factory component. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [delay\_private\_access](#input\_delay\_private\_access) | Whether to create resoruces with public access enabled and then disable it at the end. 
| `bool` | `true` | no | +| [deploy\_adls](#input\_deploy\_adls) | Feature toggle for deploying the internal data lake | `bool` | `true` | no | +| [deploy\_app\_insights](#input\_deploy\_app\_insights) | Feature toggle for deploying the App Insights | `bool` | `true` | no | +| [deploy\_app\_service\_plan](#input\_deploy\_app\_service\_plan) | Feature toggle for deploying the App Service | `bool` | `true` | no | +| [deploy\_azure\_ad\_function\_app\_registration](#input\_deploy\_azure\_ad\_function\_app\_registration) | Feature toggle for deploying the Azure AD App registration for the Function App | `bool` | `true` | no | +| [deploy\_azure\_ad\_web\_app\_registration](#input\_deploy\_azure\_ad\_web\_app\_registration) | Feature toggle for deploying the Azure AD App registration for the Web Portal | `bool` | `true` | no | +| [deploy\_azure\_role\_assignments](#input\_deploy\_azure\_role\_assignments) | Feature toggle for deploying the Azure Role Assignments | `bool` | `true` | no | +| [deploy\_bastion](#input\_deploy\_bastion) | Feature toggle for deploying bastion | `bool` | `true` | no | +| [deploy\_cicd\_vm](#input\_deploy\_cicd\_vm) | Set to true if you want to deploy a vm to host the github runnner. 
| `bool` | `false` | no | +| [deploy\_custom\_terraform](#input\_deploy\_custom\_terraform) | Whether the platform deploys the infrastructure located in the terraform\_custom folder | `bool` | `false` | no | +| [deploy\_custom\_vm](#input\_deploy\_custom\_vm) | Feature toggle for deploying a custom virtual machine | `bool` | `false` | no | +| [deploy\_data\_factory](#input\_deploy\_data\_factory) | Feature toggle for deploying the Azure Data Factory | `bool` | `true` | no | +| [deploy\_function\_app](#input\_deploy\_function\_app) | Feature toggle for deploying the Function App | `bool` | `true` | no | +| [deploy\_h2o-ai](#input\_deploy\_h2o-ai) | Feature toggle for deploying H2O-AI VM | `bool` | `false` | no | +| [deploy\_metadata\_database](#input\_deploy\_metadata\_database) | Feature toggle for deploying Metadata Database | `bool` | `true` | no | +| [deploy\_purview](#input\_deploy\_purview) | Feature toggle for deploying Azure Purview | `bool` | `false` | no | +| [deploy\_selfhostedsql](#input\_deploy\_selfhostedsql) | Feature toggle for deploying Self Hosted Sql VM | `bool` | `false` | no | +| [deploy\_sentinel](#input\_deploy\_sentinel) | Feature toggle for deploying Azure Sentinel | `bool` | `false` | no | +| [deploy\_sql\_extend\_audit\_policy](#input\_deploy\_sql\_extend\_audit\_policy) | Feature toggle for deploying the SQL Server Extended Audit policy | `bool` | `true` | no | +| [deploy\_sql\_server](#input\_deploy\_sql\_server) | Feature toggle for deploying the SQL Server | `bool` | `true` | no | +| [deploy\_state\_storage\_account](#input\_deploy\_state\_storage\_account) | Set to true if you want to deploy a the state storage account. 
| `bool` | `false` | no | +| [deploy\_storage\_account](#input\_deploy\_storage\_account) | Feature toggle for deploying the internal storage account | `bool` | `true` | no | +| [deploy\_synapse](#input\_deploy\_synapse) | Feature toggle for deploying Azure Synapse | `bool` | `false` | no | +| [deploy\_synapse\_sparkpool](#input\_deploy\_synapse\_sparkpool) | Feature toggle for deploying Azure Synapse Spark Pool | `bool` | `true` | no | +| [deploy\_synapse\_sqlpool](#input\_deploy\_synapse\_sqlpool) | Feature toggle for deploying Azure Synapse SQL Pool | `bool` | `true` | no | +| [deploy\_web\_app](#input\_deploy\_web\_app) | Feature toggle for deploying the Web App | `bool` | `true` | no | +| [deployment\_principal\_layers1and3](#input\_deployment\_principal\_layers1and3) | Object Id of the azure account that will deploy layers 1 & 3. If it is the same as the layer 2 user then leave as empty string. | `string` | `""` | no | +| [domain](#input\_domain) | The AAD domain | `string` | n/a | yes | +| [environment\_tag](#input\_environment\_tag) | The name of the environment. 
Don't use spaces | `string` | `"dev"` | no | +| [existing\_app\_service\_subnet\_id](#input\_existing\_app\_service\_subnet\_id) | An existing subnet id for reuse for the App Service delegation | `string` | `""` | no | +| [existing\_bastion\_subnet\_id](#input\_existing\_bastion\_subnet\_id) | An existing subnet id for reuse for the Bastion host | `string` | `""` | no | +| [existing\_log\_analytics\_resource\_id](#input\_existing\_log\_analytics\_resource\_id) | An existing log analytics resource id for reuse | `string` | `""` | no | +| [existing\_log\_analytics\_workspace\_id](#input\_existing\_log\_analytics\_workspace\_id) | An existing log analytics workspace id for reuse | `string` | `""` | no | +| [existing\_plink\_subnet\_id](#input\_existing\_plink\_subnet\_id) | An existing subnet id for reuse for the Private link resources | `string` | `""` | no | +| [existing\_private\_dns\_zone\_blob\_id](#input\_existing\_private\_dns\_zone\_blob\_id) | An existing private DNS zone for privatelink.blob.core.azure.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_db\_id](#input\_existing\_private\_dns\_zone\_db\_id) | An existing private DNS zone for privatelink.database.windows.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_dfs\_id](#input\_existing\_private\_dns\_zone\_dfs\_id) | An existing private DNS zone for privatelink.dfs.core.azure.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_kv\_id](#input\_existing\_private\_dns\_zone\_kv\_id) | An existing private DNS zone for privatelink.vaultcore.azure.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_purview\_id](#input\_existing\_private\_dns\_zone\_purview\_id) | An existing private DNS zone for privatelink.purview.azure.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_purview\_studio\_id](#input\_existing\_private\_dns\_zone\_purview\_studio\_id) | An existing private DNS zone for privatelink.purviewstudio.azure.net | `string` | `""` | no | +| 
[existing\_private\_dns\_zone\_queue\_id](#input\_existing\_private\_dns\_zone\_queue\_id) | An existing private DNS zone for privatelink.queue.core.azure.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_servicebus\_id](#input\_existing\_private\_dns\_zone\_servicebus\_id) | An existing private DNS zone for privatelink.servicebus.windows.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_synapse\_gateway\_id](#input\_existing\_private\_dns\_zone\_synapse\_gateway\_id) | An existing private DNS zone for privatelink.azuresynapse.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_synapse\_sql\_id](#input\_existing\_private\_dns\_zone\_synapse\_sql\_id) | An existing private DNS zone for privatelink.sql.azuresynapse.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_synapse\_studio\_id](#input\_existing\_private\_dns\_zone\_synapse\_studio\_id) | An existing private DNS zone for privatelink.dev.azuresynapse.net | `string` | `""` | no | +| [existing\_synapse\_private\_link\_hub\_id](#input\_existing\_synapse\_private\_link\_hub\_id) | An existing private link hub for synapse studio. | `string` | `""` | no | +| [existing\_vm\_subnet\_id](#input\_existing\_vm\_subnet\_id) | An existing subnet id for reuse for the Agent VMs | `string` | `""` | no | +| [existing\_vnet\_id](#input\_existing\_vnet\_id) | An existing virtual network. | `string` | `""` | no | +| [functionapp\_name](#input\_functionapp\_name) | The override name for the function app service resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [ip\_address](#input\_ip\_address) | The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets | `string` | `""` | no | +| [ip\_address2](#input\_ip\_address2) | The CICD ipaddress. 
We add an IP whitelisting to allow the setting of keyvault secrets | `string` | `""` | no | +| [is\_onprem\_datafactory\_ir\_registered](#input\_is\_onprem\_datafactory\_ir\_registered) | Are all on-premise Integration runtimes configured? | `bool` | `false` | no | +| [is\_private\_network](#input\_is\_private\_network) | If true will disable public IP's entirely | `bool` | `false` | no | +| [is\_vnet\_isolated](#input\_is\_vnet\_isolated) | Whether to deploy the resources as vnet attached / private linked | `bool` | `true` | no | +| [jumphost\_password](#input\_jumphost\_password) | Password for the jumphost | `string` | n/a | yes | +| [key\_vault\_name](#input\_key\_vault\_name) | The override name for the keyvault resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [log\_analytics\_workspace\_name](#input\_log\_analytics\_workspace\_name) | The override name for the Log Analytics workspace. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [logs\_storage\_account\_name](#input\_logs\_storage\_account\_name) | The override name for the storage account used for logs. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [owner\_tag](#input\_owner\_tag) | The tags to apply to resources. | `string` | `"opensource.microsoft.com"` | no | +| [plink\_nsg\_name](#input\_plink\_nsg\_name) | The override name for the private link subnet NSG. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [plink\_subnet\_cidr](#input\_plink\_subnet\_cidr) | CIDR of the subnet used for private link endpoints | `string` | `"10.0.0.0/26"` | no | +| [plink\_subnet\_name](#input\_plink\_subnet\_name) | The override name for the private link subnet resource. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [prefix](#input\_prefix) | The prefix value to be used for autogenerated naming conventions | `string` | `"ark"` | no | +| [publish\_datafactory\_pipelines](#input\_publish\_datafactory\_pipelines) | Feature toggle for post IAC data factory pipeline deployment | `bool` | `true` | no | +| [publish\_function\_app](#input\_publish\_function\_app) | Feature toggle for Publishing Function Application Code Base | `bool` | `true` | no | +| [publish\_functional\_tests](#input\_publish\_functional\_tests) | Feature toggle for Publishing Functional Tests to the Web App | `bool` | `false` | no | +| [publish\_metadata\_database](#input\_publish\_metadata\_database) | Feature toggle for Publishing Metadata Database schema and seeding with data | `bool` | `true` | no | +| [publish\_purview\_configuration](#input\_publish\_purview\_configuration) | Feature toggle for deploying the Purview configuration script (WIP) | `bool` | `false` | no | +| [publish\_sample\_files](#input\_publish\_sample\_files) | Feature toggle for Publishing Sample Filess | `bool` | `true` | no | +| [publish\_sif\_database](#input\_publish\_sif\_database) | Feature toggle for Publishing SIF Database | `bool` | `false` | no | +| [publish\_sql\_logins](#input\_publish\_sql\_logins) | Feature toggle for Publishing Synapse / SQL database logins for lockbox | `bool` | `true` | no | +| [publish\_web\_app](#input\_publish\_web\_app) | Feature toggle for Publishing Web Application Code Base | `bool` | `true` | no | +| [publish\_web\_app\_addcurrentuserasadmin](#input\_publish\_web\_app\_addcurrentuserasadmin) | Feature toggle for adding user running deployment as a webapp admin | `bool` | `false` | no | +| [purview\_ir\_app\_reg\_name](#input\_purview\_ir\_app\_reg\_name) | The override name for the Purview Integration runtime SP. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [purview\_name](#input\_purview\_name) | The override name for the Purview component. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [purview\_resource\_location](#input\_purview\_resource\_location) | The override location for the Purview component. If empty, will be autogenerated based on global location settings | `string` | `""` | no | +| [resource\_group\_name](#input\_resource\_group\_name) | n/a | `string` | n/a | yes | +| [resource\_location](#input\_resource\_location) | The Azure Region being deployed to. | `string` | `"Australia East"` | no | +| [resource\_owners](#input\_resource\_owners) | A web app Azure security group used for admin access. | `list(string)` | `[]` | no | +| [sif\_database\_name](#input\_sif\_database\_name) | SIF DataMart Name | `string` | `"sif"` | no | +| [sql\_admin\_username](#input\_sql\_admin\_username) | The username for the sql server admin | `string` | `"adsgofastsqladminuser11"` | no | +| [sql\_server\_name](#input\_sql\_server\_name) | The override name for the sql server resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [state\_storage\_account\_name](#input\_state\_storage\_account\_name) | The override name for the storage account used for storing state data. If empty, will be autogenerated. | `string` | `""` | no | +| [subscription\_id](#input\_subscription\_id) | The Azure Subscription ID | `string` | n/a | yes | +| [synapse\_administrators](#input\_synapse\_administrators) | List of Synapse Administrators | `map(string)` | `{}` | no | +| [synapse\_contributors](#input\_synapse\_contributors) | List of Synapse Contributors | `map(string)` | `{}` | no | +| [synapse\_data\_lake\_name](#input\_synapse\_data\_lake\_name) | The override name for the Synapse data lake component. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [synapse\_dwpool\_name](#input\_synapse\_dwpool\_name) | The override name for the Synapse Dedicated Pool component. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [synapse\_git\_devops\_project\_name](#input\_synapse\_git\_devops\_project\_name) | The name of the project to be referenced within devops. Note: Not used for github. | `string` | `"/"` | no | +| [synapse\_git\_devops\_tenant\_id](#input\_synapse\_git\_devops\_tenant\_id) | The tenant id of the devops project. By default it will be valued as your tenant\_id. Note: Not used for github. | `string` | `""` | no | +| [synapse\_git\_email\_address](#input\_synapse\_git\_email\_address) | The email address to be associated with the commit being done for the pipeline upload. | `string` | `""` | no | +| [synapse\_git\_github\_host\_url](#input\_synapse\_git\_github\_host\_url) | Specifies the GitHub Enterprise host name. For example: https://github.mydomain.com. Use https://github.com for open source repositories. Note: Not used for devops | `string` | `"https://github.com"` | no | +| [synapse\_git\_integration\_type](#input\_synapse\_git\_integration\_type) | User must define whether they wish to use 'github' integration or 'devops' | `string` | `"github"` | no | +| [synapse\_git\_pat](#input\_synapse\_git\_pat) | The personal access token used to authenticate the git account | `string` | `""` | no | +| [synapse\_git\_repository\_branch\_name](#input\_synapse\_git\_repository\_branch\_name) | The name of the github branch to be used | `string` | `"main"` | no | +| [synapse\_git\_repository\_name](#input\_synapse\_git\_repository\_name) | The name of the github repository to be used for synapse | `string` | `""` | no | +| [synapse\_git\_repository\_owner](#input\_synapse\_git\_repository\_owner) | The owner of the github repository to be used for synapse. Eg. 
for the repository https://github.com/contoso/ads, the owner is contoso | `string` | `""` | no | +| [synapse\_git\_repository\_root\_folder](#input\_synapse\_git\_repository\_root\_folder) | The name of the root folder to be used in the branch | `string` | `"/"` | no | +| [synapse\_git\_toggle\_integration](#input\_synapse\_git\_toggle\_integration) | Feature toggle for enabling synapse github integration | `bool` | `false` | no | +| [synapse\_git\_use\_pat](#input\_synapse\_git\_use\_pat) | Whether a pat is required for authentication (non public repo). | `bool` | `true` | no | +| [synapse\_git\_user\_name](#input\_synapse\_git\_user\_name) | The user name to be associated with the commit being done for the pipeline upload. | `string` | `""` | no | +| [synapse\_publishers](#input\_synapse\_publishers) | List of Synapse Publishers | `map(string)` | `{}` | no | +| [synapse\_sku](#input\_synapse\_sku) | The sku/scale of the Synapse SQL Pool | `string` | `"DW100c"` | no | +| [synapse\_spark\_max\_node\_count](#input\_synapse\_spark\_max\_node\_count) | The maximum number of spark nodes in the autoscale pool | `number` | `12` | no | +| [synapse\_spark\_min\_node\_count](#input\_synapse\_spark\_min\_node\_count) | The minimum number of spark nodes in the autoscale pool | `number` | `3` | no | +| [synapse\_sppool\_name](#input\_synapse\_sppool\_name) | The override name for the Synapse spark pool component. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [synapse\_sql\_login](#input\_synapse\_sql\_login) | Login for the Azure Synapse SQL admin | `string` | `"adsgofastsynapseadminuser14"` | no | +| [synapse\_workspace\_name](#input\_synapse\_workspace\_name) | The override name for the Synapse workspace component. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [tenant\_id](#input\_tenant\_id) | The AAD tenant ID | `string` | n/a | yes | +| [vm\_nsg\_name](#input\_vm\_nsg\_name) | The override name for the VM subnet NSG. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [vm\_subnet\_cidr](#input\_vm\_subnet\_cidr) | CIDR of the subnet used to host VM compute resources | `string` | `"10.0.0.192/26"` | no | +| [vm\_subnet\_name](#input\_vm\_subnet\_name) | The override name for the vm subnet resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [vnet\_cidr](#input\_vnet\_cidr) | CIDR of the vnet | `string` | `"10.0.0.0/24"` | no | +| [vnet\_name](#input\_vnet\_name) | The override name for the Virtual Network resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [web\_app\_admin\_security\_group](#input\_web\_app\_admin\_security\_group) | A web app Azure security group used for admin access. | `string` | `""` | no | +| [webapp\_name](#input\_webapp\_name) | The override name for the web app service. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [app\_service\_subnet\_id](#output\_app\_service\_subnet\_id) | n/a | +| [azurerm\_virtual\_network\_vnet\_name](#output\_azurerm\_virtual\_network\_vnet\_name) | n/a | +| [naming\_unique\_seed](#output\_naming\_unique\_seed) | n/a | +| [naming\_unique\_suffix](#output\_naming\_unique\_suffix) | n/a | +| [plink\_subnet\_id](#output\_plink\_subnet\_id) | n/a | +| [private\_dns\_zone\_blob\_id](#output\_private\_dns\_zone\_blob\_id) | n/a | +| [private\_dns\_zone\_db\_id](#output\_private\_dns\_zone\_db\_id) | n/a | +| [private\_dns\_zone\_dfs\_id](#output\_private\_dns\_zone\_dfs\_id) | n/a | +| [private\_dns\_zone\_kv\_id](#output\_private\_dns\_zone\_kv\_id) | n/a | +| [private\_dns\_zone\_purview\_id](#output\_private\_dns\_zone\_purview\_id) | n/a | +| [private\_dns\_zone\_purview\_studio\_id](#output\_private\_dns\_zone\_purview\_studio\_id) | n/a | +| [private\_dns\_zone\_queue\_id](#output\_private\_dns\_zone\_queue\_id) | n/a | +| [private\_dns\_zone\_servicebus\_id](#output\_private\_dns\_zone\_servicebus\_id) | n/a | +| [private\_dns\_zone\_synapse\_gateway\_id](#output\_private\_dns\_zone\_synapse\_gateway\_id) | n/a | +| [private\_dns\_zone\_synapse\_sql\_id](#output\_private\_dns\_zone\_synapse\_sql\_id) | n/a | +| [private\_dns\_zone\_synapse\_studio\_id](#output\_private\_dns\_zone\_synapse\_studio\_id) | n/a | +| [vm\_subnet\_id](#output\_vm\_subnet\_id) | n/a | diff --git a/solution/DeploymentV2/terraform_layer1/tformdocs.md b/solution/DeploymentV2/terraform_layer1/tformdocs.md new file mode 100644 index 00000000..36e2e4e9 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer1/tformdocs.md @@ -0,0 +1,91 @@ +## Requirements + +| Name | Version | +|------|---------| +| [azuread](#requirement\_azuread) | =2.22.0 | +| [azurerm](#requirement\_azurerm) | =3.12.0 | +| [random](#requirement\_random) | 
=3.3.0 | + +## Providers + +| Name | Version | +|------|---------| +| [azuread](#provider\_azuread) | 2.22.0 | +| [azurerm](#provider\_azurerm) | 3.12.0 | +| [random](#provider\_random) | 3.3.0 | +| [terraform](#provider\_terraform) | n/a | +| [time](#provider\_time) | 0.7.2 | + +## Modules + +| Name | Source | Version | +|------|--------|---------| +| [naming](#module\_naming) | Azure/naming/azurerm | 0.1.1 | + +## Resources + +| Name | Type | +|------|------| +| [azuread_application.function_app_reg](https://registry.terraform.io/providers/hashicorp/azuread/2.22.0/docs/resources/application) | resource | +| [azuread_application.purview_ir](https://registry.terraform.io/providers/hashicorp/azuread/2.22.0/docs/resources/application) | resource | +| [azuread_application.web_reg](https://registry.terraform.io/providers/hashicorp/azuread/2.22.0/docs/resources/application) | resource | +| [azuread_service_principal.function_app](https://registry.terraform.io/providers/hashicorp/azuread/2.22.0/docs/resources/service_principal) | resource | +| [azuread_service_principal.purview_ir](https://registry.terraform.io/providers/hashicorp/azuread/2.22.0/docs/resources/service_principal) | resource | +| [azuread_service_principal.web_sp](https://registry.terraform.io/providers/hashicorp/azuread/2.22.0/docs/resources/service_principal) | resource | +| [random_id.rg_deployment_unique](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/id) | resource | +| [random_uuid.app_reg_role_id](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/uuid) | resource | +| [random_uuid.app_reg_role_id2](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/uuid) | resource | +| [random_uuid.function_app_reg_role_id](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/uuid) | resource | +| 
[time_sleep.wait_30_seconds](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/sleep) | resource | +| [azurerm_client_config.current](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/data-sources/client_config) | data source | +| [terraform_remote_state.layer0](https://registry.terraform.io/providers/hashicorp/terraform/latest/docs/data-sources/remote_state) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [aad\_functionapp\_name](#input\_aad\_functionapp\_name) | The override name for the AAD App registration for the function app. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [aad\_webapp\_name](#input\_aad\_webapp\_name) | The override name for the AAD App registration for the web app. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [app\_name](#input\_app\_name) | The app\_name suffix value to be used for autogenerated naming conventions | `string` | `"ads"` | no | +| [author\_tag](#input\_author\_tag) | The tags to apply to resources. 
| `string` | `"opensource.microsoft.com"` | no | +| [deploy\_azure\_ad\_function\_app\_registration](#input\_deploy\_azure\_ad\_function\_app\_registration) | Feature toggle for deploying the Azure AD App registration for the Function App | `bool` | `true` | no | +| [deploy\_azure\_ad\_web\_app\_registration](#input\_deploy\_azure\_ad\_web\_app\_registration) | Feature toggle for deploying the Azure AD App registration for the Web Portal | `bool` | `true` | no | +| [deploy\_function\_app](#input\_deploy\_function\_app) | Feature toggle for deploying the Function App | `bool` | `true` | no | +| [deploy\_purview](#input\_deploy\_purview) | Feature toggle for deploying Azure Purview | `bool` | `false` | no | +| [deploy\_web\_app](#input\_deploy\_web\_app) | Feature toggle for deploying the Web App | `bool` | `true` | no | +| [environment\_tag](#input\_environment\_tag) | The name of the environment. Don't use spaces | `string` | `"dev"` | no | +| [functionapp\_name](#input\_functionapp\_name) | The override name for the function app service resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [ip\_address](#input\_ip\_address) | The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets | `string` | n/a | yes | +| [is\_vnet\_isolated](#input\_is\_vnet\_isolated) | Whether to deploy the resources as vnet attached / private linked | `bool` | `true` | no | +| [naming\_unique\_seed](#input\_naming\_unique\_seed) | The unique seed value to use for naming | `string` | `""` | no | +| [owner\_tag](#input\_owner\_tag) | The tags to apply to resources. | `string` | `"opensource.microsoft.com"` | no | +| [prefix](#input\_prefix) | The prefix value to be used for autogenerated naming conventions | `string` | `"ark"` | no | +| [purview\_ir\_app\_reg\_name](#input\_purview\_ir\_app\_reg\_name) | The override name for the Purview Integration runtime SP. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [purview\_name](#input\_purview\_name) | The override name for the Purview component. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [resource\_group\_name](#input\_resource\_group\_name) | n/a | `string` | n/a | yes | +| [resource\_location](#input\_resource\_location) | The Azure Region being deployed to. | `string` | `"Australia East"` | no | +| [resource\_owners](#input\_resource\_owners) | A web app Azure security group used for admin access. | `list(string)` | `[]` | no | +| [subscription\_id](#input\_subscription\_id) | The Azure Subscription ID | `string` | n/a | yes | +| [tenant\_id](#input\_tenant\_id) | The AAD tenant ID | `string` | n/a | yes | +| [webapp\_name](#input\_webapp\_name) | The override name for the web app service. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [aad\_funcreg\_id](#output\_aad\_funcreg\_id) | n/a | +| [aad\_webreg\_id](#output\_aad\_webreg\_id) | n/a | +| [azuread\_application\_function\_app\_reg\_object\_id](#output\_azuread\_application\_function\_app\_reg\_object\_id) | n/a | +| [azuread\_application\_purview\_ir\_object\_id](#output\_azuread\_application\_purview\_ir\_object\_id) | n/a | +| [azuread\_service\_principal\_function\_app\_object\_id](#output\_azuread\_service\_principal\_function\_app\_object\_id) | n/a | +| [functionapp\_name](#output\_functionapp\_name) | n/a | +| [naming\_unique\_seed](#output\_naming\_unique\_seed) | n/a | +| [naming\_unique\_suffix](#output\_naming\_unique\_suffix) | n/a | +| [purview\_name](#output\_purview\_name) | n/a | +| [purview\_sp\_id](#output\_purview\_sp\_id) | n/a | +| [purview\_sp\_name](#output\_purview\_sp\_name) | n/a | +| [purview\_sp\_object\_id](#output\_purview\_sp\_object\_id) | n/a | +| 
[random\_uuid\_function\_app\_reg\_role\_id](#output\_random\_uuid\_function\_app\_reg\_role\_id) | n/a | +| [resource\_group\_name](#output\_resource\_group\_name) | n/a | +| [tenant\_id](#output\_tenant\_id) | n/a | +| [webapp\_name](#output\_webapp\_name) | n/a | diff --git a/solution/DeploymentV2/terraform_layer2/tformdocs.md b/solution/DeploymentV2/terraform_layer2/tformdocs.md new file mode 100644 index 00000000..5ebbfd69 --- /dev/null +++ b/solution/DeploymentV2/terraform_layer2/tformdocs.md @@ -0,0 +1,400 @@ +## Requirements + +| Name | Version | +|------|---------| +| [azuread](#requirement\_azuread) | =2.22.0 | +| [azurerm](#requirement\_azurerm) | =3.12.0 | +| [random](#requirement\_random) | =3.3.0 | + +## Providers + +| Name | Version | +|------|---------| +| [azurerm](#provider\_azurerm) | 3.12.0 | +| [random](#provider\_random) | 3.3.0 | +| [terraform](#provider\_terraform) | n/a | +| [time](#provider\_time) | 0.7.2 | + +## Modules + +| Name | Source | Version | +|------|--------|---------| +| [data\_factory\_datasets](#module\_data\_factory\_datasets) | ./modules/data_factory_datasets | n/a | +| [naming](#module\_naming) | Azure/naming/azurerm | 0.1.1 | + +## Resources + +| Name | Type | +|------|------| +| [azurerm_app_service.web](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/app_service) | resource | +| [azurerm_app_service_plan.app_service_plan](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/app_service_plan) | resource | +| [azurerm_app_service_virtual_network_swift_connection.vnet_integration](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/app_service_virtual_network_swift_connection) | resource | +| [azurerm_app_service_virtual_network_swift_connection.vnet_integration_func](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/app_service_virtual_network_swift_connection) | resource | +| 
[azurerm_application_insights.app_insights](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/application_insights) | resource | +| [azurerm_data_factory.data_factory](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory) | resource | +| [azurerm_data_factory_integration_runtime_azure.azure_ir](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_integration_runtime_azure) | resource | +| [azurerm_data_factory_integration_runtime_self_hosted.self_hosted_ir](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_integration_runtime_self_hosted) | resource | +| [azurerm_data_factory_linked_custom_service.blob](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.data_lake](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.database](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.file](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.generic_kv](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.mssqldatabase](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| 
[azurerm_data_factory_linked_custom_service.mssqldatabase_sqlauth](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.oracledb](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.rest_anonymous](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.rest_basic](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.rest_oauth2](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.rest_serviceprincipal](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_custom_service.synapse](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_custom_service) | resource | +| [azurerm_data_factory_linked_service_azure_function.function_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_service_azure_function) | resource | +| [azurerm_data_factory_linked_service_key_vault.key_vault_default](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_linked_service_key_vault) | resource | +| [azurerm_data_factory_managed_private_endpoint.adls](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_managed_private_endpoint) | resource | +| 
[azurerm_data_factory_managed_private_endpoint.blob](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_managed_private_endpoint) | resource | +| [azurerm_data_factory_managed_private_endpoint.keyvault](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_managed_private_endpoint) | resource | +| [azurerm_data_factory_managed_private_endpoint.sqlserver](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_managed_private_endpoint) | resource | +| [azurerm_data_factory_managed_private_endpoint.synapse](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/data_factory_managed_private_endpoint) | resource | +| [azurerm_function_app.function_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/function_app) | resource | +| [azurerm_key_vault.app_vault](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault) | resource | +| [azurerm_key_vault_access_policy.adf_access](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault_access_policy) | resource | +| [azurerm_key_vault_access_policy.cicd_access_layers1and3](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault_access_policy) | resource | +| [azurerm_key_vault_access_policy.cicd_and_admin_access](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault_access_policy) | resource | +| [azurerm_key_vault_access_policy.function_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault_access_policy) | resource | +| [azurerm_key_vault_access_policy.synapse_access](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault_access_policy) | resource | +| 
[azurerm_key_vault_secret.function_app_key](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault_secret) | resource | +| [azurerm_key_vault_secret.selfhostedsql_password](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault_secret) | resource | +| [azurerm_linux_virtual_machine.custom_vm_linux](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/linux_virtual_machine) | resource | +| [azurerm_linux_virtual_machine.h2o-ai](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/linux_virtual_machine) | resource | +| [azurerm_log_analytics_solution.sentinel](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/log_analytics_solution) | resource | +| [azurerm_log_analytics_workspace.log_analytics_workspace](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/log_analytics_workspace) | resource | +| [azurerm_monitor_diagnostic_setting.adls_storage_diagnostic_logs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/monitor_diagnostic_setting) | resource | +| [azurerm_monitor_diagnostic_setting.app_service_diagnostic_logs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/monitor_diagnostic_setting) | resource | +| [azurerm_monitor_diagnostic_setting.app_vault_diagnostic_logs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/monitor_diagnostic_setting) | resource | +| [azurerm_monitor_diagnostic_setting.blob_storage_diagnostic_logs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/monitor_diagnostic_setting) | resource | +| [azurerm_monitor_diagnostic_setting.data_factory_diagnostic_logs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/monitor_diagnostic_setting) | resource | +| 
[azurerm_monitor_diagnostic_setting.function_diagnostic_logs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/monitor_diagnostic_setting) | resource | +| [azurerm_monitor_diagnostic_setting.synapse_diagnostic_logs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/monitor_diagnostic_setting) | resource | +| [azurerm_mssql_database.sample_db](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/mssql_database) | resource | +| [azurerm_mssql_database.staging_db](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/mssql_database) | resource | +| [azurerm_mssql_database.web_db](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/mssql_database) | resource | +| [azurerm_mssql_server.sqlserver](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/mssql_server) | resource | +| [azurerm_network_interface.custom_vm_nic](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_interface) | resource | +| [azurerm_network_interface.h2o-ai_nic](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_interface) | resource | +| [azurerm_network_interface.jumphost_nic](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_interface) | resource | +| [azurerm_network_interface.selfhostedsql_nic](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/network_interface) | resource | +| [azurerm_private_endpoint.adls_dfs_storage_private_endpoint_with_dns](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_private_endpoint.adls_storage_private_endpoint_with_dns](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| 
[azurerm_private_endpoint.app_vault_private_endpoint_with_dns](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_private_endpoint.blob_storage_private_endpoint_with_dns](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_private_endpoint.db_private_endpoint_with_dns](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_private_endpoint.storage_private_endpoint_with_dns](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_private_endpoint.synapse_dev](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_private_endpoint.synapse_sql](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_private_endpoint.synapse_sqlondemand](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_private_endpoint.synapse_web](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_public_ip.selfhostedsql](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/public_ip) | resource | +| [azurerm_purview_account.purview](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/purview_account) | resource | +| [azurerm_role_assignment.adls_data_factory](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.adls_deployment_agents](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| 
[azurerm_role_assignment.adls_function_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.adls_purview_sp](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.app_insights_function_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.app_insights_web_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.blob_data_factory](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.blob_deployment_agents](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.blob_function_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.blob_function_app_sec](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.blob_purview_sp](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.datafactory_function_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.loganalytics_function_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.loganalytics_web_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| 
[azurerm_role_assignment.synapse](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_role_assignment.synapse_function_app](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/role_assignment) | resource | +| [azurerm_storage_account.adls](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/storage_account) | resource | +| [azurerm_storage_account.blob](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/storage_account) | resource | +| [azurerm_storage_account.storage_acccount_security_logs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/storage_account) | resource | +| [azurerm_storage_data_lake_gen2_filesystem.dlfs](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/storage_data_lake_gen2_filesystem) | resource | +| [azurerm_synapse_firewall_rule.cicd](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_firewall_rule) | resource | +| [azurerm_synapse_firewall_rule.cicd_user](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_firewall_rule) | resource | +| [azurerm_synapse_firewall_rule.public_access](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_firewall_rule) | resource | +| [azurerm_synapse_linked_service.synapse_functionapp_linkedservice](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_linked_service) | resource | +| [azurerm_synapse_linked_service.synapse_keyvault_linkedservice](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_linked_service) | resource | +| [azurerm_synapse_managed_private_endpoint.adls](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_managed_private_endpoint) | resource | +| 
[azurerm_synapse_private_link_hub.hub](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_private_link_hub) | resource | +| [azurerm_synapse_role_assignment.synapse_admin_assignments](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_role_assignment) | resource | +| [azurerm_synapse_role_assignment.synapse_contributor_assignments](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_role_assignment) | resource | +| [azurerm_synapse_role_assignment.synapse_function_app_assignment](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_role_assignment) | resource | +| [azurerm_synapse_role_assignment.synapse_publisher_assignments](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_role_assignment) | resource | +| [azurerm_synapse_spark_pool.synapse_spark_pool](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_spark_pool) | resource | +| [azurerm_synapse_sql_pool.synapse_sql_pool](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_sql_pool) | resource | +| [azurerm_synapse_workspace.synapse](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/synapse_workspace) | resource | +| [azurerm_windows_virtual_machine.custom_vm_windows](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/windows_virtual_machine) | resource | +| [azurerm_windows_virtual_machine.jumphost](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/windows_virtual_machine) | resource | +| [azurerm_windows_virtual_machine.selfhostedsqlvm](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/windows_virtual_machine) | resource | +| [random_id.rg_deployment_unique](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/id) | 
resource | +| [random_password.custom_vm](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/password) | resource | +| [random_password.database](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/password) | resource | +| [random_password.h2o-ai](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/password) | resource | +| [random_password.selfhostedsql](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/password) | resource | +| [time_sleep.azurerm_synapse_firewall_rule_wait_30_seconds_cicd](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/sleep) | resource | +| [time_sleep.cicd_access](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/sleep) | resource | +| [azurerm_client_config.current](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/data-sources/client_config) | data source | +| [azurerm_function_app_host_keys.function_app_host_key](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/data-sources/function_app_host_keys) | data source | +| [terraform_remote_state.layer0](https://registry.terraform.io/providers/hashicorp/terraform/latest/docs/data-sources/remote_state) | data source | +| [terraform_remote_state.layer1](https://registry.terraform.io/providers/hashicorp/terraform/latest/docs/data-sources/remote_state) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [aad\_functionapp\_name](#input\_aad\_functionapp\_name) | The override name for the AAD App registration for the function app. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [aad\_webapp\_name](#input\_aad\_webapp\_name) | The override name for the AAD App registration for the web app. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [adf\_git\_email\_address](#input\_adf\_git\_email\_address) | The email address to be associated with the commit being done for the pipeline upload. | `string` | `""` | no | +| [adf\_git\_host\_url](#input\_adf\_git\_host\_url) | Specifies the GitHub Enterprise host name. For example: https://github.mydomain.com. Use https://github.com for open source repositories. | `string` | `"https://github.com"` | no | +| [adf\_git\_pat](#input\_adf\_git\_pat) | The personal access token used to authenticate the git account | `string` | `""` | no | +| [adf\_git\_repository\_branch\_name](#input\_adf\_git\_repository\_branch\_name) | The name of the github branch to be used | `string` | `"main"` | no | +| [adf\_git\_repository\_name](#input\_adf\_git\_repository\_name) | The name of the github repository to be used for synapse | `string` | `""` | no | +| [adf\_git\_repository\_owner](#input\_adf\_git\_repository\_owner) | The owner of the github repository to be used for adf. Eg. for the repository https://github.com/contoso/ads, the owner is contoso | `string` | `""` | no | +| [adf\_git\_repository\_root\_folder](#input\_adf\_git\_repository\_root\_folder) | The name of the root folder to be used in the branch | `string` | `"/"` | no | +| [adf\_git\_toggle\_integration](#input\_adf\_git\_toggle\_integration) | Feature toggle for enabling adf github integration | `bool` | `false` | no | +| [adf\_git\_use\_pat](#input\_adf\_git\_use\_pat) | Whether a pat is required for authentication (non public repo). | `bool` | `true` | no | +| [adf\_git\_user\_name](#input\_adf\_git\_user\_name) | The user name to be associated with the commit being done for the pipeline upload. | `string` | `""` | no | +| [adls\_storage\_account\_name](#input\_adls\_storage\_account\_name) | The override name for the storage account used for adls. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [allow\_public\_access\_to\_synapse\_studio](#input\_allow\_public\_access\_to\_synapse\_studio) | Should the synapse studio allow access to public IPs | `bool` | `false` | no | +| [app\_insights\_name](#input\_app\_insights\_name) | The override name for the app insights resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [app\_name](#input\_app\_name) | The app\_name suffix value to be used for autogenerated naming conventions | `string` | `"ads"` | no | +| [app\_service\_nsg\_name](#input\_app\_service\_nsg\_name) | The override name for the app service subnet NSG. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [app\_service\_plan\_name](#input\_app\_service\_plan\_name) | The override name for the app service plan resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [app\_service\_sku](#input\_app\_service\_sku) | The sku/scale of the app service |
object({
tier = string
size = string
capacity = number })
|
{
"capacity": 1,
"size": "S1",
"tier": "Standard"
}
| no | +| [app\_service\_subnet\_cidr](#input\_app\_service\_subnet\_cidr) | CIDR of the subnet used to host the app service plan | `string` | `"10.0.0.128/26"` | no | +| [app\_service\_subnet\_name](#input\_app\_service\_subnet\_name) | The override name for the app service subnet resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [author\_tag](#input\_author\_tag) | The tags to apply to resources. | `string` | `"opensource.microsoft.com"` | no | +| [azure\_purview\_data\_curators](#input\_azure\_purview\_data\_curators) | List of Azure Purview Data Curators for default root | `map(string)` | `{}` | no | +| [azure\_sql\_aad\_administrators](#input\_azure\_sql\_aad\_administrators) | List of Azure SQL Administrators | `map(string)` | `{}` | no | +| [bastion\_ip\_name](#input\_bastion\_ip\_name) | The override name for the Bastion service Public IP. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [bastion\_name](#input\_bastion\_name) | The override name for the Bastion service. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [bastion\_nsg\_name](#input\_bastion\_nsg\_name) | The override name for the bastion subnet NSG. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [bastion\_subnet\_cidr](#input\_bastion\_subnet\_cidr) | CIDR of the subnet used for bastion | `string` | `"10.0.0.64/26"` | no | +| [blob\_storage\_account\_name](#input\_blob\_storage\_account\_name) | The override name for the storage account used for staging data. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [cicd\_sp\_id](#input\_cicd\_sp\_id) | The Object Id of the GitHub Service Principal. 
This will ensure that keyvault access policies are configured for GitHub/terraform to read secret state later | `string` | `""` | no | +| [configure\_networking](#input\_configure\_networking) | Feature toggle for post IAC network configuration | `bool` | `true` | no | +| [custom\_vm\_image\_offer](#input\_custom\_vm\_image\_offer) | An Azure custom image marketplace image offer to be referenced for a custom vm image. | `string` | `""` | no | +| [custom\_vm\_image\_publisher](#input\_custom\_vm\_image\_publisher) | An Azure custom image marketplace image publisher to be referenced for a custom vm image. | `string` | `""` | no | +| [custom\_vm\_image\_sku](#input\_custom\_vm\_image\_sku) | An Azure custom image marketplace image sku to be referenced for a custom vm image. | `string` | `""` | no | +| [custom\_vm\_image\_version](#input\_custom\_vm\_image\_version) | An Azure custom image marketplace image version to be referenced for a custom vm image. | `string` | `"latest"` | no | +| [custom\_vm\_os](#input\_custom\_vm\_os) | User must define whether they wish to deploy a 'windows' or 'linux' virtual machine. | `string` | `"linux"` | no | +| [custom\_vm\_plan\_name](#input\_custom\_vm\_plan\_name) | An Azure vm plan name to be referenced for a custom vm image. | `string` | `""` | no | +| [custom\_vm\_plan\_product](#input\_custom\_vm\_plan\_product) | An Azure vm plan product to be referenced for a custom vm image. | `string` | `""` | no | +| [custom\_vm\_plan\_publisher](#input\_custom\_vm\_plan\_publisher) | An Azure vm plan publisher to be referenced for a custom vm image. | `string` | `""` | no | +| [data\_factory\_name](#input\_data\_factory\_name) | The override name for the Data Factory component. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [delay\_private\_access](#input\_delay\_private\_access) | Whether to create resources with public access enabled and then disable it at the end. 
| `bool` | `true` | no | +| [deploy\_adls](#input\_deploy\_adls) | Feature toggle for deploying the internal data lake | `bool` | `true` | no | +| [deploy\_app\_insights](#input\_deploy\_app\_insights) | Feature toggle for deploying the App Insights | `bool` | `true` | no | +| [deploy\_app\_service\_plan](#input\_deploy\_app\_service\_plan) | Feature toggle for deploying the App Service | `bool` | `true` | no | +| [deploy\_azure\_ad\_function\_app\_registration](#input\_deploy\_azure\_ad\_function\_app\_registration) | Feature toggle for deploying the Azure AD App registration for the Function App | `bool` | `true` | no | +| [deploy\_azure\_ad\_web\_app\_registration](#input\_deploy\_azure\_ad\_web\_app\_registration) | Feature toggle for deploying the Azure AD App registration for the Web Portal | `bool` | `true` | no | +| [deploy\_azure\_role\_assignments](#input\_deploy\_azure\_role\_assignments) | Feature toggle for deploying the Azure Role Assignments | `bool` | `true` | no | +| [deploy\_bastion](#input\_deploy\_bastion) | Feature toggle for deploying bastion | `bool` | `true` | no | +| [deploy\_custom\_terraform](#input\_deploy\_custom\_terraform) | Whether the platform deploys the infrastructure located in the terraform\_custom folder | `bool` | `false` | no | +| [deploy\_custom\_vm](#input\_deploy\_custom\_vm) | Feature toggle for deploying a custom virtual machine | `bool` | `false` | no | +| [deploy\_data\_factory](#input\_deploy\_data\_factory) | Feature toggle for deploying the Azure Data Factory | `bool` | `true` | no | +| [deploy\_function\_app](#input\_deploy\_function\_app) | Feature toggle for deploying the Function App | `bool` | `true` | no | +| [deploy\_h2o-ai](#input\_deploy\_h2o-ai) | Feature toggle for deploying H2O-AI VM | `bool` | `false` | no | +| [deploy\_metadata\_database](#input\_deploy\_metadata\_database) | Feature toggle for deploying Metadata Database | `bool` | `true` | no | +| [deploy\_purview](#input\_deploy\_purview) | Feature 
toggle for deploying Azure Purview | `bool` | `false` | no | +| [deploy\_selfhostedsql](#input\_deploy\_selfhostedsql) | Feature toggle for deploying Self Hosted Sql VM | `bool` | `false` | no | +| [deploy\_sentinel](#input\_deploy\_sentinel) | Feature toggle for deploying Azure Sentinel | `bool` | `false` | no | +| [deploy\_sql\_extend\_audit\_policy](#input\_deploy\_sql\_extend\_audit\_policy) | Feature toggle for deploying the SQL Server Extended Audit policy | `bool` | `true` | no | +| [deploy\_sql\_server](#input\_deploy\_sql\_server) | Feature toggle for deploying the SQL Server | `bool` | `true` | no | +| [deploy\_storage\_account](#input\_deploy\_storage\_account) | Feature toggle for deploying the internal storage account | `bool` | `true` | no | +| [deploy\_synapse](#input\_deploy\_synapse) | Feature toggle for deploying Azure Synapse | `bool` | `false` | no | +| [deploy\_synapse\_sparkpool](#input\_deploy\_synapse\_sparkpool) | Feature toggle for deploying Azure Synapse Spark Pool | `bool` | `true` | no | +| [deploy\_synapse\_sqlpool](#input\_deploy\_synapse\_sqlpool) | Feature toggle for deploying Azure Synapse SQL Pool | `bool` | `true` | no | +| [deploy\_web\_app](#input\_deploy\_web\_app) | Feature toggle for deploying the Web App | `bool` | `true` | no | +| [deployment\_principal\_layers1and3](#input\_deployment\_principal\_layers1and3) | Object Id of the azure account that will deploy layers 1 & 3. If it is the same as the layer 2 user then leave as empty string. | `string` | `""` | no | +| [domain](#input\_domain) | The AAD domain | `string` | n/a | yes | +| [environment\_tag](#input\_environment\_tag) | The name of the environment. 
Don't use spaces | `string` | `"dev"` | no | +| [existing\_app\_service\_subnet\_id](#input\_existing\_app\_service\_subnet\_id) | An existing subnet id for reuse for the App Service delegation | `string` | `""` | no | +| [existing\_bastion\_subnet\_id](#input\_existing\_bastion\_subnet\_id) | An existing subnet id for reuse for the Bastion host | `string` | `""` | no | +| [existing\_log\_analytics\_resource\_id](#input\_existing\_log\_analytics\_resource\_id) | An existing log analytics resource id for reuse | `string` | `""` | no | +| [existing\_log\_analytics\_workspace\_id](#input\_existing\_log\_analytics\_workspace\_id) | An existing log analytics workspace id for reuse | `string` | `""` | no | +| [existing\_plink\_subnet\_id](#input\_existing\_plink\_subnet\_id) | An existing subnet id for reuse for the Private link resources | `string` | `""` | no | +| [existing\_private\_dns\_zone\_blob\_id](#input\_existing\_private\_dns\_zone\_blob\_id) | An existing private DNS zone for privatelink.blob.core.azure.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_db\_id](#input\_existing\_private\_dns\_zone\_db\_id) | An existing private DNS zone for privatelink.database.windows.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_dfs\_id](#input\_existing\_private\_dns\_zone\_dfs\_id) | An existing private DNS zone for privatelink.dfs.core.azure.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_kv\_id](#input\_existing\_private\_dns\_zone\_kv\_id) | An existing private DNS zone for privatelink.vaultcore.azure.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_purview\_id](#input\_existing\_private\_dns\_zone\_purview\_id) | An existing private DNS zone for privatelink.purview.azure.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_purview\_studio\_id](#input\_existing\_private\_dns\_zone\_purview\_studio\_id) | An existing private DNS zone for privatelink.purviewstudio.azure.net | `string` | `""` | no | +| 
[existing\_private\_dns\_zone\_queue\_id](#input\_existing\_private\_dns\_zone\_queue\_id) | An existing private DNS zone for privatelink.queue.core.azure.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_servicebus\_id](#input\_existing\_private\_dns\_zone\_servicebus\_id) | An existing private DNS zone for privatelink.servicebus.windows.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_synapse\_gateway\_id](#input\_existing\_private\_dns\_zone\_synapse\_gateway\_id) | An existing private DNS zone for privatelink.azuresynapse.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_synapse\_sql\_id](#input\_existing\_private\_dns\_zone\_synapse\_sql\_id) | An existing private DNS zone for privatelink.sql.azuresynapse.net | `string` | `""` | no | +| [existing\_private\_dns\_zone\_synapse\_studio\_id](#input\_existing\_private\_dns\_zone\_synapse\_studio\_id) | An existing private DNS zone for privatelink.dev.azuresynapse.net | `string` | `""` | no | +| [existing\_synapse\_private\_link\_hub\_id](#input\_existing\_synapse\_private\_link\_hub\_id) | An existing private link hub for synapse studio. | `string` | `""` | no | +| [existing\_vm\_subnet\_id](#input\_existing\_vm\_subnet\_id) | An existing subnet id for reuse for the Agent VMs | `string` | `""` | no | +| [functionapp\_name](#input\_functionapp\_name) | The override name for the function app service resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [ip\_address](#input\_ip\_address) | The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets | `string` | `""` | no | +| [ip\_address2](#input\_ip\_address2) | The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets | `string` | `""` | no | +| [is\_onprem\_datafactory\_ir\_registered](#input\_is\_onprem\_datafactory\_ir\_registered) | Are all on-premise Integration runtimes configured? 
| `bool` | `false` | no | +| [is\_vnet\_isolated](#input\_is\_vnet\_isolated) | Whether to deploy the resources as vnet attached / private linked | `bool` | `true` | no | +| [jumphost\_password](#input\_jumphost\_password) | Password for the jumphost | `string` | n/a | yes | +| [key\_vault\_name](#input\_key\_vault\_name) | The override name for the keyvault resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [log\_analytics\_workspace\_name](#input\_log\_analytics\_workspace\_name) | The override name for the Log Analytics workspace. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [logs\_storage\_account\_name](#input\_logs\_storage\_account\_name) | The override name for the storage account used for logs. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [owner\_tag](#input\_owner\_tag) | The tags to apply to resources. | `string` | `"opensource.microsoft.com"` | no | +| [plink\_nsg\_name](#input\_plink\_nsg\_name) | The override name for the private link subnet NSG. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [plink\_subnet\_cidr](#input\_plink\_subnet\_cidr) | CIDR of the subnet used for private link endpoints | `string` | `"10.0.0.0/26"` | no | +| [plink\_subnet\_name](#input\_plink\_subnet\_name) | The override name for the private link subnet resource. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [prefix](#input\_prefix) | The prefix value to be used for autogenerated naming conventions | `string` | `"ark"` | no | +| [publish\_datafactory\_pipelines](#input\_publish\_datafactory\_pipelines) | Feature toggle for post IAC data factory pipeline deployment | `bool` | `true` | no | +| [publish\_function\_app](#input\_publish\_function\_app) | Feature toggle for Publishing Function Application Code Base | `bool` | `true` | no | +| [publish\_functional\_tests](#input\_publish\_functional\_tests) | Feature toggle for Publishing Functional Tests to the Web App | `bool` | `false` | no | +| [publish\_metadata\_database](#input\_publish\_metadata\_database) | Feature toggle for Publishing Metadata Database schema and seeding with data | `bool` | `true` | no | +| [publish\_purview\_configuration](#input\_publish\_purview\_configuration) | Feature toggle for deploying the Purview configuration script (WIP) | `bool` | `false` | no | +| [publish\_sample\_files](#input\_publish\_sample\_files) | Feature toggle for Publishing Sample Files | `bool` | `true` | no | +| [publish\_sif\_database](#input\_publish\_sif\_database) | Feature toggle for Publishing SIF Database | `bool` | `false` | no | +| [publish\_sql\_logins](#input\_publish\_sql\_logins) | Feature toggle for Publishing Synapse / SQL database logins for lockbox | `bool` | `true` | no | +| [publish\_web\_app](#input\_publish\_web\_app) | Feature toggle for Publishing Web Application Code Base | `bool` | `true` | no | +| [publish\_web\_app\_addcurrentuserasadmin](#input\_publish\_web\_app\_addcurrentuserasadmin) | Feature toggle for adding user running deployment as a webapp admin | `bool` | `false` | no | +| [purview\_ir\_app\_reg\_name](#input\_purview\_ir\_app\_reg\_name) | The override name for the Purview Integration runtime SP. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [purview\_name](#input\_purview\_name) | The override name for the Purview component. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [purview\_resource\_location](#input\_purview\_resource\_location) | The override location for the Purview component. If empty, will be autogenerated based global location settings | `string` | `""` | no | +| [resource\_group\_name](#input\_resource\_group\_name) | n/a | `string` | n/a | yes | +| [resource\_location](#input\_resource\_location) | The Azure Region being deployed to. | `string` | `"Australia East"` | no | +| [resource\_owners](#input\_resource\_owners) | A web app Azure security group used for admin access. | `list(string)` | `[]` | no | +| [sif\_database\_name](#input\_sif\_database\_name) | SIF DataMart Name | `string` | `"sif"` | no | +| [sql\_admin\_username](#input\_sql\_admin\_username) | The username for the sql server admin | `string` | `"adsgofastsqladminuser11"` | no | +| [sql\_server\_name](#input\_sql\_server\_name) | The override name for the sql server resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [subscription\_id](#input\_subscription\_id) | The Azure Subscription ID | `string` | n/a | yes | +| [synapse\_administrators](#input\_synapse\_administrators) | List of Synapse Administrators | `map(string)` | `{}` | no | +| [synapse\_contributors](#input\_synapse\_contributors) | List of Synapse Contributors | `map(string)` | `{}` | no | +| [synapse\_data\_lake\_name](#input\_synapse\_data\_lake\_name) | The override name for the Synapse data lake component. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [synapse\_dwpool\_name](#input\_synapse\_dwpool\_name) | The override name for the Synapse Dedicated Pool component. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [synapse\_git\_devops\_project\_name](#input\_synapse\_git\_devops\_project\_name) | The name of the project to be referenced within devops. Note: Not used for github. | `string` | `"/"` | no | +| [synapse\_git\_devops\_tenant\_id](#input\_synapse\_git\_devops\_tenant\_id) | The tenant id of the devops project. By default it will be valued as your tenant\_id. Note: Not used for github. | `string` | `""` | no | +| [synapse\_git\_email\_address](#input\_synapse\_git\_email\_address) | The email address to be associated with the commit being done for the pipeline upload. | `string` | `""` | no | +| [synapse\_git\_github\_host\_url](#input\_synapse\_git\_github\_host\_url) | Specifies the GitHub Enterprise host name. For example: https://github.mydomain.com. Use https://github.com for open source repositories. Note: Not used for devops | `string` | `"https://github.com"` | no | +| [synapse\_git\_integration\_type](#input\_synapse\_git\_integration\_type) | User must define whether they wish to use 'github' integration or 'devops' | `string` | `"github"` | no | +| [synapse\_git\_pat](#input\_synapse\_git\_pat) | The personal access token used to authenticate the git account | `string` | `""` | no | +| [synapse\_git\_repository\_branch\_name](#input\_synapse\_git\_repository\_branch\_name) | The name of the github branch to be used | `string` | `"main"` | no | +| [synapse\_git\_repository\_name](#input\_synapse\_git\_repository\_name) | The name of the github repository to be used for synapse | `string` | `""` | no | +| [synapse\_git\_repository\_owner](#input\_synapse\_git\_repository\_owner) | The owner of the github repository to be used for synapse. Eg. 
for the repository https://github.com/contoso/ads, the owner is contoso | `string` | `""` | no | +| [synapse\_git\_repository\_root\_folder](#input\_synapse\_git\_repository\_root\_folder) | The name of the root folder to be used in the branch | `string` | `"/"` | no | +| [synapse\_git\_toggle\_integration](#input\_synapse\_git\_toggle\_integration) | Feature toggle for enabling synapse github integration | `bool` | `false` | no | +| [synapse\_git\_use\_pat](#input\_synapse\_git\_use\_pat) | Whether a pat is required for authentication (non public repo). | `bool` | `true` | no | +| [synapse\_git\_user\_name](#input\_synapse\_git\_user\_name) | The user name to be associated with the commit being done for the pipeline upload. | `string` | `""` | no | +| [synapse\_publishers](#input\_synapse\_publishers) | List of Synapse Publishers | `map(string)` | `{}` | no | +| [synapse\_sku](#input\_synapse\_sku) | The sku/scale of the Synapse SQL Pool | `string` | `"DW100c"` | no | +| [synapse\_spark\_max\_node\_count](#input\_synapse\_spark\_max\_node\_count) | The maximum number of spark nodes in the autoscale pool | `number` | `12` | no | +| [synapse\_spark\_min\_node\_count](#input\_synapse\_spark\_min\_node\_count) | The minimum number of spark nodes in the autoscale pool | `number` | `3` | no | +| [synapse\_sppool\_name](#input\_synapse\_sppool\_name) | The override name for the Synapse spark pool component. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [synapse\_sql\_login](#input\_synapse\_sql\_login) | Login for the Azure Synapse SQL admin | `string` | `"adsgofastsynapseadminuser14"` | no | +| [synapse\_sql\_password](#input\_synapse\_sql\_password) | Password for the Azure Synapse SQL admin | `string` | n/a | yes | +| [synapse\_workspace\_name](#input\_synapse\_workspace\_name) | The override name for the Synapse workspace component. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [tenant\_id](#input\_tenant\_id) | The AAD tenant ID | `string` | n/a | yes | +| [vm\_nsg\_name](#input\_vm\_nsg\_name) | The override name for the VM subnet NSG. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [vm\_subnet\_cidr](#input\_vm\_subnet\_cidr) | CIDR of the subnet used to host VM compute resources | `string` | `"10.0.0.192/26"` | no | +| [vm\_subnet\_name](#input\_vm\_subnet\_name) | The override name for the vm subnet resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [vnet\_cidr](#input\_vnet\_cidr) | CIDR of the vnet | `string` | `"10.0.0.0/24"` | no | +| [vnet\_name](#input\_vnet\_name) | The override name for the Virtual Network resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [web\_app\_admin\_security\_group](#input\_web\_app\_admin\_security\_group) | A web app Azure security group used for admin access. | `string` | `""` | no | +| [webapp\_name](#input\_webapp\_name) | The override name for the web app service. 
If empty, will be autogenerated based on prefix settings | `string` | `""` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [aad\_funcreg\_id](#output\_aad\_funcreg\_id) | n/a | +| [aad\_webreg\_id](#output\_aad\_webreg\_id) | n/a | +| [adf\_git\_email\_address](#output\_adf\_git\_email\_address) | n/a | +| [adf\_git\_host\_url](#output\_adf\_git\_host\_url) | n/a | +| [adf\_git\_pat](#output\_adf\_git\_pat) | n/a | +| [adf\_git\_repository\_branch\_name](#output\_adf\_git\_repository\_branch\_name) | n/a | +| [adf\_git\_repository\_name](#output\_adf\_git\_repository\_name) | n/a | +| [adf\_git\_repository\_owner](#output\_adf\_git\_repository\_owner) | n/a | +| [adf\_git\_repository\_root\_folder](#output\_adf\_git\_repository\_root\_folder) | n/a | +| [adf\_git\_toggle\_integration](#output\_adf\_git\_toggle\_integration) | n/a | +| [adf\_git\_use\_pat](#output\_adf\_git\_use\_pat) | n/a | +| [adf\_git\_user\_name](#output\_adf\_git\_user\_name) | n/a | +| [adlsstorage\_name](#output\_adlsstorage\_name) | n/a | +| [azure\_purview\_data\_curators](#output\_azure\_purview\_data\_curators) | n/a | +| [azure\_sql\_aad\_administrators](#output\_azure\_sql\_aad\_administrators) | n/a | +| [azuread\_application\_function\_app\_reg\_object\_id](#output\_azuread\_application\_function\_app\_reg\_object\_id) | n/a | +| [azuread\_application\_purview\_ir\_object\_id](#output\_azuread\_application\_purview\_ir\_object\_id) | n/a | +| [azuread\_service\_principal\_function\_app\_object\_id](#output\_azuread\_service\_principal\_function\_app\_object\_id) | n/a | +| [azurerm\_function\_app\_identity\_principal\_id](#output\_azurerm\_function\_app\_identity\_principal\_id) | n/a | +| [azurerm\_key\_vault\_app\_vault\_id](#output\_azurerm\_key\_vault\_app\_vault\_id) | n/a | +| [azurerm\_purview\_account\_purview\_id](#output\_azurerm\_purview\_account\_purview\_id) | n/a | +| 
[azurerm\_virtual\_network\_vnet\_name](#output\_azurerm\_virtual\_network\_vnet\_name) | n/a | +| [blobstorage\_name](#output\_blobstorage\_name) | n/a | +| [configure\_networking](#output\_configure\_networking) | n/a | +| [datafactory\_name](#output\_datafactory\_name) | n/a | +| [datafactory\_principal\_id](#output\_datafactory\_principal\_id) | n/a | +| [deploy\_custom\_terraform](#output\_deploy\_custom\_terraform) | n/a | +| [deploy\_function\_app](#output\_deploy\_function\_app) | n/a | +| [deploy\_metadata\_database](#output\_deploy\_metadata\_database) | n/a | +| [deploy\_sql\_server](#output\_deploy\_sql\_server) | n/a | +| [deploy\_synapse](#output\_deploy\_synapse) | n/a | +| [deploy\_web\_app](#output\_deploy\_web\_app) | n/a | +| [function\_app\_principal\_id](#output\_function\_app\_principal\_id) | n/a | +| [functionapp\_name](#output\_functionapp\_name) | n/a | +| [functionapp\_url](#output\_functionapp\_url) | n/a | +| [integration\_runtimes](#output\_integration\_runtimes) | n/a | +| [is\_onprem\_datafactory\_ir\_registered](#output\_is\_onprem\_datafactory\_ir\_registered) | n/a | +| [is\_vnet\_isolated](#output\_is\_vnet\_isolated) | n/a | +| [jumphost\_vm\_name](#output\_jumphost\_vm\_name) | n/a | +| [keyvault\_name](#output\_keyvault\_name) | n/a | +| [keyvault\_url](#output\_keyvault\_url) | n/a | +| [loganalyticsworkspace\_id](#output\_loganalyticsworkspace\_id) | n/a | +| [metadatadb\_name](#output\_metadatadb\_name) | n/a | +| [naming\_unique\_seed](#output\_naming\_unique\_seed) | n/a | +| [naming\_unique\_suffix](#output\_naming\_unique\_suffix) | n/a | +| [plink\_subnet\_id](#output\_plink\_subnet\_id) | n/a | +| [private\_dns\_zone\_blob\_id](#output\_private\_dns\_zone\_blob\_id) | n/a | +| [private\_dns\_zone\_dfs\_id](#output\_private\_dns\_zone\_dfs\_id) | n/a | +| [private\_dns\_zone\_purview\_id](#output\_private\_dns\_zone\_purview\_id) | n/a | +| 
[private\_dns\_zone\_purview\_studio\_id](#output\_private\_dns\_zone\_purview\_studio\_id) | n/a | +| [private\_dns\_zone\_queue\_id](#output\_private\_dns\_zone\_queue\_id) | n/a | +| [private\_dns\_zone\_servicebus\_id](#output\_private\_dns\_zone\_servicebus\_id) | n/a | +| [publish\_datafactory\_pipelines](#output\_publish\_datafactory\_pipelines) | n/a | +| [publish\_function\_app](#output\_publish\_function\_app) | n/a | +| [publish\_functional\_tests](#output\_publish\_functional\_tests) | n/a | +| [publish\_metadata\_database](#output\_publish\_metadata\_database) | n/a | +| [publish\_purview\_configuration](#output\_publish\_purview\_configuration) | n/a | +| [publish\_sample\_files](#output\_publish\_sample\_files) | n/a | +| [publish\_sif\_database](#output\_publish\_sif\_database) | n/a | +| [publish\_sql\_logins](#output\_publish\_sql\_logins) | n/a | +| [publish\_web\_app](#output\_publish\_web\_app) | n/a | +| [publish\_web\_app\_addcurrentuserasadmin](#output\_publish\_web\_app\_addcurrentuserasadmin) | n/a | +| [purview\_account\_principal\_id](#output\_purview\_account\_principal\_id) | n/a | +| [purview\_name](#output\_purview\_name) | n/a | +| [purview\_sp\_id](#output\_purview\_sp\_id) | n/a | +| [purview\_sp\_name](#output\_purview\_sp\_name) | n/a | +| [purview\_sp\_object\_id](#output\_purview\_sp\_object\_id) | n/a | +| [random\_uuid\_function\_app\_reg\_role\_id](#output\_random\_uuid\_function\_app\_reg\_role\_id) | n/a | +| [resource\_group\_name](#output\_resource\_group\_name) | n/a | +| [sampledb\_name](#output\_sampledb\_name) | n/a | +| [selfhostedsqlvm\_name](#output\_selfhostedsqlvm\_name) | n/a | +| [sif\_database\_name](#output\_sif\_database\_name) | n/a | +| [sqlserver\_name](#output\_sqlserver\_name) | n/a | +| [stagingdb\_name](#output\_stagingdb\_name) | n/a | +| [subscription\_id](#output\_subscription\_id) | n/a | +| [synapse\_git\_devops\_project\_name](#output\_synapse\_git\_devops\_project\_name) | n/a | +| 
[synapse\_git\_devops\_tenant\_id](#output\_synapse\_git\_devops\_tenant\_id) | n/a | +| [synapse\_git\_email\_address](#output\_synapse\_git\_email\_address) | n/a | +| [synapse\_git\_github\_host\_url](#output\_synapse\_git\_github\_host\_url) | n/a | +| [synapse\_git\_integration\_type](#output\_synapse\_git\_integration\_type) | n/a | +| [synapse\_git\_pat](#output\_synapse\_git\_pat) | n/a | +| [synapse\_git\_repository\_branch\_name](#output\_synapse\_git\_repository\_branch\_name) | n/a | +| [synapse\_git\_repository\_name](#output\_synapse\_git\_repository\_name) | n/a | +| [synapse\_git\_repository\_owner](#output\_synapse\_git\_repository\_owner) | n/a | +| [synapse\_git\_repository\_root\_folder](#output\_synapse\_git\_repository\_root\_folder) | n/a | +| [synapse\_git\_toggle\_integration](#output\_synapse\_git\_toggle\_integration) | n/a | +| [synapse\_git\_use\_pat](#output\_synapse\_git\_use\_pat) | n/a | +| [synapse\_git\_user\_name](#output\_synapse\_git\_user\_name) | n/a | +| [synapse\_lakedatabase\_container\_name](#output\_synapse\_lakedatabase\_container\_name) | n/a | +| [synapse\_spark\_pool\_name](#output\_synapse\_spark\_pool\_name) | n/a | +| [synapse\_sql\_pool\_name](#output\_synapse\_sql\_pool\_name) | n/a | +| [synapse\_workspace\_name](#output\_synapse\_workspace\_name) | n/a | +| [tenant\_id](#output\_tenant\_id) | n/a | +| [webapp\_name](#output\_webapp\_name) | n/a | diff --git a/solution/DeploymentV2/terraform_layer3/tformdocs.md b/solution/DeploymentV2/terraform_layer3/tformdocs.md new file mode 100644 index 00000000..3da3d13a --- /dev/null +++ b/solution/DeploymentV2/terraform_layer3/tformdocs.md @@ -0,0 +1,89 @@ +## Requirements + +| Name | Version | +|------|---------| +| [azuread](#requirement\_azuread) | =2.22.0 | +| [azurerm](#requirement\_azurerm) | =3.12.0 | +| [random](#requirement\_random) | =3.3.0 | + +## Providers + +| Name | Version | +|------|---------| +| [azuread](#provider\_azuread) | 2.22.0 | +| 
[azurerm](#provider\_azurerm) | 3.12.0 | +| [random](#provider\_random) | 3.3.0 | +| [terraform](#provider\_terraform) | n/a | + +## Modules + +| Name | Source | Version | +|------|--------|---------| +| [naming](#module\_naming) | Azure/naming/azurerm | 0.1.1 | +| [purview\_ingestion\_private\_endpoints](#module\_purview\_ingestion\_private\_endpoints) | ./modules/purview_ingestion_private_endpoints | n/a | + +## Resources + +| Name | Type | +|------|------| +| [azuread_app_role_assignment.func_msi_app_role](https://registry.terraform.io/providers/hashicorp/azuread/2.22.0/docs/resources/app_role_assignment) | resource | +| [azuread_app_role_assignment.func_sp_app_role](https://registry.terraform.io/providers/hashicorp/azuread/2.22.0/docs/resources/app_role_assignment) | resource | +| [azuread_application_password.function_app](https://registry.terraform.io/providers/hashicorp/azuread/2.22.0/docs/resources/application_password) | resource | +| [azuread_application_password.purview_ir](https://registry.terraform.io/providers/hashicorp/azuread/2.22.0/docs/resources/application_password) | resource | +| [azurerm_key_vault_access_policy.purview_access](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault_access_policy) | resource | +| [azurerm_key_vault_secret.azure_function_secret](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault_secret) | resource | +| [azurerm_key_vault_secret.purview_ir_sp_password](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/key_vault_secret) | resource | +| [azurerm_private_endpoint.purview_account_private_endpoint_with_dns](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| [azurerm_private_endpoint.purview_portal_private_endpoint_with_dns](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/resources/private_endpoint) | resource | +| 
[random_id.rg_deployment_unique](https://registry.terraform.io/providers/hashicorp/random/3.3.0/docs/resources/id) | resource | +| [azurerm_client_config.current](https://registry.terraform.io/providers/hashicorp/azurerm/3.12.0/docs/data-sources/client_config) | data source | +| [terraform_remote_state.layer2](https://registry.terraform.io/providers/hashicorp/terraform/latest/docs/data-sources/remote_state) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [aad\_functionapp\_name](#input\_aad\_functionapp\_name) | The override name for the AAD App registration for the function app. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [aad\_webapp\_name](#input\_aad\_webapp\_name) | The override name for the AAD App registration for the web app. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [app\_name](#input\_app\_name) | The app\_name suffix value to be used for autogenerated naming conventions | `string` | `"ads"` | no | +| [author\_tag](#input\_author\_tag) | The tags to apply to resources. 
| `string` | `"opensource.microsoft.com"` | no | +| [azure\_purview\_data\_curators](#input\_azure\_purview\_data\_curators) | List of Azure Purview Data Curators for default root | `map(string)` | `{}` | no | +| [azure\_sql\_aad\_administrators](#input\_azure\_sql\_aad\_administrators) | List of Azure SQL Administrators | `map(string)` | `{}` | no | +| [deploy\_azure\_ad\_function\_app\_registration](#input\_deploy\_azure\_ad\_function\_app\_registration) | Feature toggle for deploying the Azure AD App registration for the Function App | `bool` | `true` | no | +| [deploy\_azure\_ad\_web\_app\_registration](#input\_deploy\_azure\_ad\_web\_app\_registration) | Feature toggle for deploying the Azure AD App registration for the Web Portal | `bool` | `true` | no | +| [deploy\_data\_factory](#input\_deploy\_data\_factory) | Feature toggle for deploying the Azure Data Factory | `bool` | `true` | no | +| [deploy\_function\_app](#input\_deploy\_function\_app) | Feature toggle for deploying the Function App | `bool` | `true` | no | +| [deploy\_purview](#input\_deploy\_purview) | Feature toggle for deploying Azure Purview | `bool` | `false` | no | +| [deploy\_web\_app](#input\_deploy\_web\_app) | Feature toggle for deploying the Web App | `bool` | `true` | no | +| [environment\_tag](#input\_environment\_tag) | The name of the environment. Don't use spaces | `string` | `"dev"` | no | +| [functionapp\_name](#input\_functionapp\_name) | The override name for the function app service resource. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | +| [ip\_address](#input\_ip\_address) | The CICD ipaddress. We add an IP whitelisting to allow the setting of keyvault secrets | `string` | n/a | yes | +| [is\_vnet\_isolated](#input\_is\_vnet\_isolated) | Whether to deploy the resources as vnet attached / private linked | `bool` | `true` | no | +| [owner\_tag](#input\_owner\_tag) | The tags to apply to resources. 
| `string` | `"opensource.microsoft.com"` | no | +| [prefix](#input\_prefix) | The prefix value to be used for autogenerated naming conventions | `string` | `"ark"` | no | +| [resource\_group\_name](#input\_resource\_group\_name) | n/a | `string` | n/a | yes | +| [resource\_location](#input\_resource\_location) | The Azure Region being deployed to. | `string` | `"Australia East"` | no | +| [resource\_owners](#input\_resource\_owners) | A web app Azure security group used for admin access. | `map(string)` | `{}` | no | +| [subscription\_id](#input\_subscription\_id) | The Azure Subscription ID | `string` | n/a | yes | +| [tenant\_id](#input\_tenant\_id) | The AAD tenant ID | `string` | n/a | yes | +| [webapp\_name](#input\_webapp\_name) | The override name for the web app service. If empty, will be autogenerated based on prefix settings | `string` | `""` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [aad\_funcreg\_id](#output\_aad\_funcreg\_id) | n/a | +| [aad\_webreg\_id](#output\_aad\_webreg\_id) | n/a | +| [datafactory\_name](#output\_datafactory\_name) | n/a | +| [functionapp\_name](#output\_functionapp\_name) | n/a | +| [metadatadb\_name](#output\_metadatadb\_name) | n/a | +| [naming\_unique\_seed](#output\_naming\_unique\_seed) | n/a | +| [naming\_unique\_suffix](#output\_naming\_unique\_suffix) | n/a | +| [resource\_group\_name](#output\_resource\_group\_name) | n/a | +| [sampledb\_name](#output\_sampledb\_name) | n/a | +| [sqlserver\_name](#output\_sqlserver\_name) | n/a | +| [stagingdb\_name](#output\_stagingdb\_name) | n/a | +| [synapse\_sql\_pool\_name](#output\_synapse\_sql\_pool\_name) | n/a | +| [synapse\_workspace\_name](#output\_synapse\_workspace\_name) | n/a | +| [tenant\_id](#output\_tenant\_id) | n/a | +| [webapp\_name](#output\_webapp\_name) | n/a | diff --git a/solution/DeploymentV2/utilities/GenerateTerraformDocs.ps1 b/solution/DeploymentV2/utilities/GenerateTerraformDocs.ps1 new file mode 100644 index 
00000000..8361b2d5 --- /dev/null +++ b/solution/DeploymentV2/utilities/GenerateTerraformDocs.ps1 @@ -0,0 +1,30 @@ + + + + + +$PathToReturnTo = (Get-Location).Path +$deploymentFolderPath = Convert-Path( (Get-Location).Path + '/../') + +Set-Location $deploymentFolderPath +Set-Location ./terraform_layer0 +$content = terraform-docs markdown table . +$content | Set-Content tformdocs.md + +Set-Location $deploymentFolderPath +Set-Location ./terraform_layer1 +$content = terraform-docs markdown table . +$content | Set-Content tformdocs.md + +Set-Location $deploymentFolderPath +Set-Location ./terraform_layer2 +$content = terraform-docs markdown table . +$content | Set-Content tformdocs.md + +Set-Location $deploymentFolderPath +Set-Location ./terraform_layer3 +$content = terraform-docs markdown table . +$content | Set-Content tformdocs.md + +Set-Location $deploymentFolderPath +Write-Host "Finished" \ No newline at end of file From 3617312d53872872b34af313079f3ecd0a6017b5 Mon Sep 17 00:00:00 2001 From: John Rampono Date: Mon, 22 Aug 2022 20:48:10 +0800 Subject: [PATCH 151/151] Readme changes --- solution/DeploymentV2/README.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/solution/DeploymentV2/README.md b/solution/DeploymentV2/README.md index cbd82c1e..531a3ef1 100644 --- a/solution/DeploymentV2/README.md +++ b/solution/DeploymentV2/README.md @@ -76,9 +76,12 @@ The configuration for this environment creation is read from the following locat ## :green_circle: PART 3. Deployment Details ### Deployment Layers - Summary + +[Deploy.ps1](Deploy.ps1) provides a simple way for you to deploy all of the terraform layers included in this deployment at once. In practice, when setting up a CICD based deployment you will most likely choose to break this up and deploy each layer separately.
The table below provides a summary of the different terraform layers included in this solution. Click the links in the first column to browse detailed layer documentation produced using [https://terraform-docs.io/](https://terraform-docs.io/) + +Layer | Description | Permissions Required when using Service Principal | Permissions Required when using User Principal | +--- | --- | --- | --- | +[Terraform Layer Zero](./terraform_layer0/tformdocs.md) | Deploys the spoke VNET with subnets, dns zones, bastion & a VM for the CICD agent | Resource Group Owner

Blob Contributor on Terraform's State Storage Account | Resource Group Owner

Blob Contributor on Terraform's State Storage Account [Terraform Layer One](./terraform_layer1/tformdocs.md)| Register AAD Enterprise Applications & Service Principals | Application.ReadWrite.OwnedBy

Blob Contributor on Terraform's State Storage Account| Application Administrator (Role)

Blob Contributor on Terraform's State Storage Account -Terraform Layer Two | Core IAC deployment for approx. 70 ADS Go fast resources | Resource Group Owner

Blob Contributor on Terraform's State Storage Account| Resource Group Owner

Blob Contributor on Terraform's State Storage Account -Terraform Layer Three | Update AAD Enterprise Applications by granting required roles and permissions to managed service identities created in Layer Two

Create Private Endpoints for Purview | Application.ReadWrite.OwnedBy
(Must be same identity as that which was used to run Layer One)

Blob Contributor on Terraform's State Storage Account | Application Administrator (Role),

Network Contributor

Blob Contributor on Terraform's State Storage Account +[Terraform Layer Two](./terraform_layer2/tformdocs.md)| Core IAC deployment for approx. 70 ADS Go fast resources | Resource Group Owner

Blob Contributor on Terraform's State Storage Account| Resource Group Owner

Blob Contributor on Terraform's State Storage Account +[Terraform Layer Three](./terraform_layer3/tformdocs.md)| Update AAD Enterprise Applications by granting required roles and permissions to managed service identities created in Layer Two

Create Private Endpoints for Purview | Application.ReadWrite.OwnedBy
(Must be same identity as that which was used to run Layer One)

Blob Contributor on Terraform's State Storage Account | Application Administrator (Role),

Network Contributor

Blob Contributor on Terraform's State Storage Account