Release Admin Snippets

Useful snippets for administration and release processes #

Start/Stop/Restart environment #

iisreset /start
Start-D365Environment
Stop-D365Environment
iisreset /stop
Restart-D365Environment
iisreset
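
To check which of the D365 services are currently running before or after one of these commands, d365fo.tools also offers a status cmdlet (assuming the module is installed):

Get-D365Environment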

Restore database from bacpac #

Import-D365Bacpac -ImportModeTier1 -BacpacFile "C:\temp\CLI TEST1backup.bacpac" -NewDatabaseName "TestData_20230428" -ShowOriginalProgress
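
Once the import has completed, the imported database can be made active (see the Switch database section below). A sketch reusing the database name from the command above; the "_original" suffix is just an illustrative choice:

Switch-D365ActiveDatabase -SourceDatabaseName "TestData_20230428" -DestinationSuffix "_original"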

Optionally, clear large log and staging tables from the bacpac before importing it

Clear-D365TableDataFromBacpac -Path "C:\temp\CLI TEST1backup.bacpac" -TableName "SECURITYOBJECTHISTORY","*Staging*","BatchHistory","SYSDATABASELOG*","ReqCalcTaskTrace", "DOCUHISTORY", "EVENTCUD", "BATCH", "DOCUREF", "SYSLASTVALUE", "DMFSTAGINGLOG" -ClearFromSource

SQL script to run a few moments after starting the bacpac restore, to speed up the import

ALTER DATABASE [AxDb] SET RECOVERY SIMPLE WITH NO_WAIT
GO
ALTER DATABASE [AxDB] SET DELAYED_DURABILITY = FORCED WITH NO_WAIT
GO
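
Once the restore has finished, delayed durability can be switched back off. A minimal sketch, assuming the SqlServer module is available and the local default instance is used:

Invoke-Sqlcmd -ServerInstance "." -Query "ALTER DATABASE [AxDB] SET DELAYED_DURABILITY = DISABLED WITH NO_WAIT"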

Another way to restore a database from a bacpac

# This script can be used to 
# - remove AutoDrop properties
# - remove Grant.KillDatabaseConnection.Database elements of type SqlPermissionStatement
# from the model file of a SQL Server 2022 (or equivalent Azure SQL) bacpac backup.
# This enables restoring the bacpac on a SQL server 2019.
# The d365fo.tools module needs to be installed to run this script.
# See also 
# - https://github.com/d365collaborative/d365fo.tools/issues/747 (for AutoDrop)
# - https://github.com/d365collaborative/d365fo.tools/issues/820 (for Grant.KillDatabaseConnection.Database)
# Original script by @batetech in https://www.yammer.com/dynamicsaxfeedbackprograms/#/Threads/show?threadId=2382104258371584
# Grant.KillDatabaseConnection.Database changes by @batetech in https://www.yammer.com/dynamicsaxfeedbackprograms/threads/2782521902366720
# Minor changes by @FH-Inway
# Gist of script: https://gist.github.com/FH-Inway/f485c720b43b72bffaca5fb6c094707e
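# Note: the script assumes d365fo.tools is already installed; if not, it can usually be
# installed from the PowerShell Gallery (requires gallery access):
# Install-Module -Name d365fo.tools -Scope CurrentUser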

# Add your file information here
$bacpacFileNameAndPath = "C:\Users\Admin6e3\Downloads\testbackup.bacpac"
# Will be created by script. Existing files will be overwritten.
$modelFilePath = "C:\Temp\BacpacModel.xml" 
$modelFileUpdatedPath = "C:\Temp\UpdatedBacpacModel.xml"
$newDatabaseName = "AxDB_TEST20250812"

function Local-FixBacPacModelFile
{
    param(
        [string]$sourceFile, 

        [string]$destinationFile,

        [int]$flushCnt = 500000
    )

    if($sourceFile.Equals($destinationFile, [System.StringComparison]::CurrentCultureIgnoreCase))
    {
        throw "Source and destination files must not be the same."
        return;
    }

    $searchForString = '<Property Name="AutoDrop" Value="True" />';
    $replaceWithString = '';
    
    $killDBConnStart = '<Element Type="SqlPermissionStatement" Name="[Grant.KillDatabaseConnection.Database].';
    $killDBConnEnd = '</Element>';

    #using performance suggestions from here: https://learn.microsoft.com/en-us/powershell/scripting/dev-cross-plat/performance/script-authoring-considerations
    # * use List<String> instead of PS Array @()
    # * use StreamReader instead of Get-Content
    $buffer = [System.Collections.Generic.List[string]]::new($flushCnt) #much faster than PS array using +=
    $buffCnt = 0;

    #delete dest file if it already exists.
    if(Test-Path -LiteralPath $destinationFile)
    {
        Remove-Item -LiteralPath $destinationFile -Force;
    }

    try
    {
        $stream = [System.IO.StreamReader]::new($sourceFile)
        $streamEncoding = $stream.CurrentEncoding;
        Write-Verbose "StreamReader.CurrentEncoding: $($streamEncoding.BodyName) $($streamEncoding.CodePage)"

        while ($stream.Peek() -ge 0)
        {
            $line = $stream.ReadLine()
            if(-not [string]::IsNullOrEmpty($line))
            {
                $lineIndex = $line.IndexOf($killDBConnStart, [System.StringComparison]::CurrentCultureIgnoreCase)
                if($lineIndex -ge 0)
                {
                    Write-Host "Skipping line: $line"
                    #Note: This fix assumes that the element we need to remove will be on its own line, which it is currently. If this changes in the future and other content
                    #      is included on the same file line, then this fix could corrupt the bacpac and we would need to fix our logic.
                    #Fix for error: Error SQL72014: Core Microsoft SqlClient Data Provider: Msg 4630, Level 16, State 1, Line 1 The permission 'KILL DATABASE CONNECTION' is not supported in this version of SQL Server. 
                    #     Alternatively, use the server level 'ALTER ANY CONNECTION' permission. Error SQL72045: Script execution error.  The executed script: GRANT KILL DATABASE CONNECTION TO [ms_db_configreader];  
                    #The XML block that causes this issue (so we need to remove) looks like this: 
                    <#
                    <Element Type="SqlPermissionStatement" Name="[Grant.KillDatabaseConnection.Database].[ms_db_configreader].[dbo]">
	                    <Property Name="Permission" Value="1114" />
	                    <Relationship Name="Grantee">
		                    <Entry>
			                    <References Name="[ms_db_configreader]" />
		                    </Entry>
	                    </Relationship>
	                    <Relationship Name="SecuredObject">
		                    <Entry>
			                    <References Disambiguator="1" />
		                    </Entry>
	                    </Relationship>
                    </Element>
                    #>
                    
                    #Loop until we get to the end tag. 
                    $foundEndTag = $false;
                    while($foundEndTag -eq $false -and $stream.Peek() -ge 0)
                    {
                        $line = $stream.ReadLine();
                        if($line.IndexOf($killDBConnEnd, [System.StringComparison]::CurrentCultureIgnoreCase) -ge 0)
                        {
                            #we found the end tag, so skip it and move on.
                            $foundEndTag = $true;
                        }     
                    }
                }
                else
                {
                    #AutoDrop fix
                    $buffer.Add($line.Replace($searchForString,$replaceWithString));
                }
            }
            else
            {
                $buffer.Add($line);
            }

            $buffCnt++;
            if($buffCnt -ge $flushCnt)
            {
                Write-Verbose "$(Get-Date -Format 'u') Flush buffer"
                $buffer | Add-Content -LiteralPath $destinationFile -Encoding UTF8
                $buffer = [System.Collections.Generic.List[string]]::new($flushCnt);
                $buffCnt = 0;
                Write-Verbose "$(Get-Date -Format 'u') Flush complete"
            }
        }
    }
    finally
    {
        $stream.Dispose()
        Write-Verbose 'Stream disposed'
    }

    #flush anything still remaining in the buffer
    if($buffCnt -gt 0)
    {
        $buffer | Add-Content -LiteralPath $destinationFile -Encoding UTF8
        $buffer = $null;
        $buffCnt = 0;
    }

}

Export-D365BacpacModelFile -Path $bacpacFileNameAndPath -OutputPath $modelFilePath -Force -Verbose

Write-Host "$(Get-Date -Format 'u') Fixing model file..."
$VerbosePreferenceOriginal = $VerbosePreference
$VerbosePreference = 'Continue' # Comment or remove this line to remove the "Flush buffer/complete" messages
Local-FixBacPacModelFile -sourceFile $modelFilePath -destinationFile $modelFileUpdatedPath -Verbose
$VerbosePreference = $VerbosePreferenceOriginal
Write-Host "$(Get-Date -Format 'u') Model file fixed."

Import-D365Bacpac -ImportModeTier1 -BacpacFile "$bacpacFileNameAndPath" -NewDatabaseName "$newDatabaseName" -ModelFile $modelFileUpdatedPath -MaxParallelism 32 -Verbose

# Where Local-FixBacPacModelFile is the function defined above, using StreamReader with buffering instead of Get-Content piped directly to Add-Content, which on a dev VM (B8ms, HDDs) made a big difference.

Switch database #

Switch-D365ActiveDatabase -SourceDatabaseName "AxDB_PRODBKUP20220316" -DestinationSuffix "_reverted"
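
To switch back later, the database that received the "_reverted" suffix can be made active again. A sketch; the suffix given to the database being swapped out is an arbitrary choice:

Switch-D365ActiveDatabase -SourceDatabaseName "AxDB_reverted" -DestinationSuffix "_prodbkup"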

Check if anyone has locked the DB

SELECT a.*, d.name DatabaseName, f.name LogicalName, f.physical_name AS PhysicalName, f.type_desc TypeofFile FROM
   (SELECT DB_NAME(dbid) as DBName, COUNT(dbid) as NumberOfConnections, loginame as LoginName FROM sys.sysprocesses
      WHERE dbid > 0
      GROUP BY dbid, loginame) a, sys.master_files f
   INNER JOIN sys.databases d ON d.database_id = f.database_id
   where d.Name = a.DBName 
   AND f.type_desc = 'ROWS'
ORDER BY a.DBName, a.LoginName

Sync database #

Invoke-D365DBSync -Verbose

Enable all users #

Enable-D365User

Publish all reports #

Publish-D365SsrsReport

Reset cache #

https://axurl/?mi=SysClassRunner&cls=SysFlushAOD&cmp=DAT

Run job #

https://axurl/?mi=SysClassRunner&cls=YouRunnableClass

Browse table #

https://axurl/?mi=SysTableBrowser&TableName=TABLE&cmp=DAT&lng=en-US&limitednav=true

Import all models from folder #

$files = Get-ChildItem "C:\SISTemp\IPPackages\10.0.22.46.22.02.05.HFX4\AXModelSource_7.0.6164.49_2022.4.18.1\"

for ($i=0; $i -lt $files.Count; $i++) {
    $outfile = $files[$i].FullName 
    Write-Output $outfile
    Import-D365Model -Path $outfile -Replace -ShowOriginalProgress
}

Install license #

Traditional approach #

Microsoft.Dynamics.AX.Deployment.Setup.exe --setupmode importlicensefile --metadatadir K:\AosService\PackagesLocalDirectory --bindir K:\AosService\PackagesLocalDirectory --sqlserver . --sqldatabase AxDB --sqluser axdbadmin --sqlpwd C545454545454545=c!M= --licensefilename "K:\Licenses\IPCPlatform.txt"

Modern approach #

Invoke-D365InstallLicense -Path "C:\SISTemp\IPPackages\"

Install all licenses from folder #

$files = Get-ChildItem "C:\Temp\Licenses\"
 
for ($i=0; $i -lt $files.Count; $i++) {
   $outfile = $files[$i].FullName 
   Write-Output $outfile
   Invoke-D365InstallLicense -Path $outfile -ShowOriginalProgress
} 

Rename database #

--set main db to single-user mode and drop the existing connections
ALTER DATABASE AxDB SET SINGLE_USER WITH ROLLBACK IMMEDIATE
--rename database to db_old
ALTER DATABASE AxDB MODIFY NAME = AxDB_old
--set the old db to multi user
ALTER DATABASE AxDB_old SET MULTI_USER
--rename the new db to the main db name
ALTER DATABASE AxDB_new MODIFY NAME = AxDB

Setup deployable package #

Standard approach #

AXUpdateInstaller.exe generate -runbookid=OneBoxDev -topologyfile=DefaultTopologyData.xml -servicemodelfile=DefaultServiceModelData.xml -runbookfile=OneBoxDev-runbook.xml
AXUpdateInstaller.exe import -runbookfile=OneBoxDev-runbook.xml
AXUpdateInstaller.exe execute -runbookid=OneBoxDev
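
If a step fails, the runbook can be resumed from that step once the issue is fixed (standard AXUpdateInstaller syntax; replace the placeholder with the failing step number from the runbook log):

AXUpdateInstaller.exe execute -runbookid=OneBoxDev -rerunstep=<failed step number>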

Alternative variant #

Invoke-D365SDPInstall -Path "C:\SISTmp\SISIP\10.0.18.42.21.08.04.HFX20\10.0.18.42.21.08.04.HFX20\" -Command RunAll -ShowOriginalProgress -RunbookId <NEW Value>

Import users to Tier1 #

Import a user to Tier1 in one line #

Import-D365ExternalUser -Id user1 -Name user1 -Email user1@user1.com

List of users #

Enable-D365User #optional
$users = 
"nkovalova@sisn.com",
"vhlushchenko@sisn.com",
"vyakovliev@sisn.com"


foreach ($user in $users)
{
    Write-Output $user
    $userId = $user.Split("@")[0]
    Import-D365ExternalUser -Id $userId -Name $userId -Email $user
}

Rebuild all indexes in all tables in all databases #

DECLARE @Database NVARCHAR(255)   
DECLARE @Table NVARCHAR(255)  
DECLARE @cmd NVARCHAR(1000)  

DECLARE DatabaseCursor CURSOR READ_ONLY FOR  
SELECT name FROM master.sys.databases   
WHERE name NOT IN ('master','msdb','tempdb','model','distribution')  -- databases to exclude
AND name IN ('AxDB', 'AXDW') -- use this to select specific databases and comment out line above
AND state = 0 -- database is online
AND is_in_standby = 0 -- database is not read only for log shipping
ORDER BY 1  

OPEN DatabaseCursor  

FETCH NEXT FROM DatabaseCursor INTO @Database  
WHILE @@FETCH_STATUS = 0  
BEGIN  

   SET @cmd = 'DECLARE TableCursor CURSOR READ_ONLY FOR SELECT ''['' + table_catalog + ''].['' + table_schema + ''].['' +  
   table_name + '']'' as tableName FROM [' + @Database + '].INFORMATION_SCHEMA.TABLES WHERE table_type = ''BASE TABLE'''   

   -- create table cursor  
   EXEC (@cmd)  
   OPEN TableCursor   

   FETCH NEXT FROM TableCursor INTO @Table   
   WHILE @@FETCH_STATUS = 0   
   BEGIN
      BEGIN TRY   
         SET @cmd = 'ALTER INDEX ALL ON ' + @Table + ' REBUILD' 
         --PRINT @cmd -- uncomment if you want to see commands
         EXEC (@cmd) 
      END TRY
      BEGIN CATCH
         PRINT '---'
         PRINT @cmd
         PRINT ERROR_MESSAGE() 
         PRINT '---'
      END CATCH

      FETCH NEXT FROM TableCursor INTO @Table   
   END   

   CLOSE TableCursor   
   DEALLOCATE TableCursor  

   FETCH NEXT FROM DatabaseCursor INTO @Database  
END  
CLOSE DatabaseCursor   
DEALLOCATE DatabaseCursor
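
One way to run the script above from PowerShell is Invoke-Sqlcmd from the SqlServer module; a sketch, assuming the script was saved to a file (the path is only an example) and is run against the local default instance:

Invoke-Sqlcmd -ServerInstance "." -InputFile "C:\Temp\RebuildAllIndexes.sql" -QueryTimeout 65535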

Datamart reset #

PowerShell approach #

NET STOP "MR2012ProcessService"
cd K:\MROneBox\MRInstallDirectory\Server\MRDeploy\
Import-Module .\MRDeploy.psd1
Reset-DatamartIntegration -Reason OTHER -ReasonDetail "MyReason"
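
After the reset completes, the Management Reporter service presumably needs to be started again; this step is not part of the original snippet:

NET START "MR2012ProcessService"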

User interface approach #

TBD

Search for errors during installation of a new hotfix #

Get-D365Runbook -Latest | Invoke-D365RunbookAnalyzer | Out-File "C:\Temp\runbook-analyze-results.xml"

Unified environment operations #

Power Platform Tools for Visual Studio Code #

pac auth create --environment https://env.crm.dynamics.com
pac auth list
pac auth select --environment https://env.crm.dynamics.com
BaseDir:\PackagesLocalDirectory\bin\ModelUtil.exe -convertToUnifiedPackage -file="CloudHostedPackage.zip" -outputpath=OutputFolder 
pac package deploy --logconsole --package OutputFolder\TemplatePackage.dll --logFile OutputFolder\deploy.log

Merge 2 UDE packages #

File name: MergeAndUpdate.ps1

param (
    [Parameter(Mandatory=$true)][string]$Source1,
    [Parameter(Mandatory=$true)][string]$Source2,
    [Parameter(Mandatory=$true)][string]$Target
)

# Initialize counters
$stats = @{
    Source1Files = 0
    Source2Files = 0
    SubfoldersProcessed = 0
    XmlFilesUpdated = 0
    ZipEntriesAdded = 0
}

$globalStopwatch = [System.Diagnostics.Stopwatch]::StartNew()

function Copy-FolderContent {
    param (
        [string]$Source,
        [string]$Destination,
        [ref]$FileCounter
    )
    Write-Host "Copying files from $Source..."
    Get-ChildItem -Path $Source -Recurse | ForEach-Object {
        $relativePath = $_.FullName.Substring($Source.Length).TrimStart('\')
        $destPath = Join-Path $Destination $relativePath
        if ($_.PSIsContainer) {
            New-Item -ItemType Directory -Path $destPath -Force | Out-Null
        } else {
            Copy-Item -Path $_.FullName -Destination $destPath -Force
            $FileCounter.Value++
        }
    }
    Write-Host "Done copying from $Source. Files copied: $($FileCounter.Value)"
}

function Update-ImportConfigXml {
    param (
        [string]$Folder,
        [ref]$ZipCounter
    )
    $xmlPath = Join-Path $Folder "ImportConfig.xml"
    if (-not (Test-Path $xmlPath)) { return }

    [xml]$xml = Get-Content $xmlPath
    $zipFiles = Get-ChildItem -Path $Folder -Filter "*_managed.zip" | Select-Object -ExpandProperty Name

    $existingFiles = @()
    if ($xml.configdatastorage.externalpackages.package) {
        $existingFiles = $xml.configdatastorage.externalpackages.package | ForEach-Object { $_.filename }
    }

    $added = 0
    foreach ($zip in $zipFiles) {
        if (-not ($existingFiles -contains $zip)) {
            $newNode = $xml.CreateElement("package")
            $newNode.SetAttribute("type", "xpp")
            $newNode.SetAttribute("filename", $zip)
            $xml.configdatastorage.externalpackages.AppendChild($newNode) | Out-Null
            $added++
        }
    }

    if ($added -gt 0) {
        $xml.Save($xmlPath)
        $ZipCounter.Value += $added
        $true
    } else {
        $false
    }
}

# Step 1: Copy contents
Copy-FolderContent -Source $Source1 -Destination $Target -FileCounter ([ref]$stats.Source1Files)
Copy-FolderContent -Source $Source2 -Destination $Target -FileCounter ([ref]$stats.Source2Files)

# Step 2: Update XMLs
Write-Host "Scanning for ImportConfig.xml files..."
Get-ChildItem -Path $Target -Recurse -Filter "ImportConfig.xml" | ForEach-Object {
    $folder = Split-Path $_.FullName
    $stats.SubfoldersProcessed++
    $updated = Update-ImportConfigXml -Folder $folder -ZipCounter ([ref]$stats.ZipEntriesAdded)
    if ($updated) {
        $stats.XmlFilesUpdated++
        Write-Host "Updated XML in: $folder"
    } else {
        Write-Host "No changes needed in: $folder"
    }
}

$globalStopwatch.Stop()

# Final summary
Write-Host "All done"
Write-Host "  🕒 Total execution time:  $([math]::Round($globalStopwatch.Elapsed.TotalSeconds, 2)) seconds"

Call it: .\MergeAndUpdate.ps1 -Source1 "D:\Downloads\UDEJoin\AXDeployablePackage_2025.8.22.6" -Source2 "D:\Downloads\UDEJoin\ALOPSAXDeployablePackage_2025.8.22.6" -Target "D:\Downloads\UDEJoin\Merged"

Build models in the CHE environment #

$logPath = "c:\temp\d365fo.tools\Logs\BuildModels-$(Get-Date -Format 'yyyyMMdd_HHmmss').log"

# ===================== LOGGING =====================
Start-Transcript -Path $logPath

$modelsQueue = New-Object System.Collections.Queue

# Start time measurement
$startTime = Get-Date

$models = Get-D365Model -ExcludeMicrosoftModels
# Mark the start execution time

# Method definitions
function buildModel {
    param(
        [string]$ModelName,
        [string]$Module,
        [int]$Attempt = 1
    )

    Write-Host "[$Attempt] Compiling model: $ModelName from module: $Module"
    #Invoke-D365ModuleCompile -Module $ModelName -ShowOriginalProgress | Out-Null
    Invoke-D365ModuleFullCompile -Module $ModelName -ShowOriginalProgress | Out-Null
    $logPath = "c:\temp\d365fo.tools\Logs\ModuleCompile\$ModelName\Dynamics.AX.$ModelName.xppc.log"
    if (-not (Test-Path $logPath)) {
        Write-Warning "Log file not found for $ModelName"
        return @{ ModelName = $ModelName; Module = $Module; Errors = -1 }
    }

    $lastLines = Get-Content $logPath | Select-Object -Last 5
    $errorLine = $lastLines | Where-Object { $_ -match '^Errors:\s*(\d+)' }

    if ($errorLine -match '^Errors:\s*(\d+)') {
        $errorCount = [int]$matches[1]
        Write-Host "Errors: $errorCount"
        return @{ ModelName = $ModelName; Module = $Module; Errors = $errorCount }
    } else {
        Write-Warning "Could not parse error count for $ModelName"
        return @{ ModelName = $ModelName; Module = $Module; Errors = -1 }
    }
}

function sync {
    param([string]$ModelName)

    $logPath = "c:\temp\d365fo.tools\Sync\$ModelName.log"
    Write-Host "🔄 Syncing model: $ModelName → Logging to $logPath"

    Invoke-D365DbSyncModule -Module $ModelName > $logPath
}

function show_the_errors {
    param([string]$ModelName)

    $logPath = "c:\temp\d365fo.tools\Logs\ModuleCompile\$ModelName\Dynamics.AX.$ModelName.xppc.log"
    Write-Host "🧾 Analyzing compiler results for: $ModelName → $logPath"

    Invoke-D365CompilerResultAnalyzer -Path $logPath
}

# Model parsing
$rawBlocks = ($models -replace '}\s*@{', '}|@{') -split '\|'
$parsedModels = @()

foreach ($block in $rawBlocks) {
    $cleanBlock = $block.Trim('@{}') -replace ';', "`n"
    $props = @{}
    foreach ($line in $cleanBlock -split "`n") {
        if ($line -match '^\s*(\w+)\s*=\s*(.*)$') {
            $props[$matches[1]] = $matches[2].Trim()
        }
    }
    if ($props.ModelName -and $props.Module -and $props.IsBinary -ne "True") {
        $parsedModels += $props
    }
}

# Tracking models with errors
$retryQueue = @{}

# Initial build
foreach ($model in $parsedModels) {
    $result = buildModel -ModelName $model.ModelName -Module $model.Module -Attempt 1
    if ($result.Errors -gt 0) {
        $retryQueue[$model.ModelName] = @{ Module = $model.Module; Attempts = 1 }
    } else {
        $modelsQueue.Enqueue($model.ModelName)
    }
}

# Retry attempts (maximum 5)
while ($retryQueue.Count -gt 0) {
    foreach ($key in @($retryQueue.Keys)) {

        $info = $retryQueue[$key]
        $attempts = $info.Attempts + 1

        if ($attempts -gt 5) {
            Write-Warning "Max attempts reached for $key"
            $retryQueue.Remove($key)
            break
        }

        $result = buildModel -ModelName $key -Module $info.Module -Attempt $attempts
        if ($result.Errors -eq 0) {
            Write-Host "Model $key compiled successfully on attempt $attempts"
            $retryQueue.Remove($key)
            $modelsQueue.Enqueue($key)
        } else {
            $retryQueue[$key].Attempts = $attempts
        }
    }

    # Remove models that exceeded the limit
    foreach ($key in @($retryQueue.Keys)) {
        if ($retryQueue[$key].Attempts -gt 5) {
            Write-Warning "Max attempts reached for $key"
            $retryQueue.Remove($key)
        }
    }
}

$logRoot = "C:\Temp\d365fo.tools\Logs\ModuleCompile"
$logFiles = Get-ChildItem -Path $logRoot -Recurse -Filter "*xppc.log"
$totalErrors = 0

foreach ($file in $logFiles) {
    $fileName = $file.Name
    if ($fileName -match '^Dynamics\.AX\.(.+?)\.xppc\.log$') {
        $modelName = $matches[1]
        $errorLine = Get-Content $file.FullName | Where-Object { $_ -match '^Errors:\s*(\d+)' } | Select-Object -First 1
        if ($errorLine -match '^Errors:\s*(\d+)') {
            $errorCount = [int]$matches[1]
            $totalErrors += $errorCount
        } else {
            $errorCount = "N/A"
        }
        $lastWriteTime = $file.LastWriteTime.ToString("yyyy-MM-dd HH:mm:ss")
        Write-Host "Errors: $errorCount, Modified: $lastWriteTime, Model: $modelName"
    }
}

Write-Host "Total errors: $totalErrors"

if ($totalErrors -gt 0) {
    Write-Host "`n❌ Showing errors for models that failed after 5 attempts..."
    foreach ($key in $retryQueue.Keys) {
        show_the_errors -ModelName $key
    }
} else {
    Write-Host "`n🔄 Running sync for all models..."

    #foreach ($model in $parsedModels) {
    #    sync -ModelName $model.ModelName
    #}
    Invoke-D365DBSync -Verbose
    Write-Host "`n✅ All models compiled successfully. No errors to show."

    Publish-D365SsrsReport
    Write-Host "`n✅ Reports are published"
}

# 🗂️ Opening folder in explorer
Start-Process "explorer.exe" "C:\Temp\d365fo.tools"

# Models were processed in this order
Write-Host "Models were processed in this order:`n"
foreach ($item in $modelsQueue) {
    Write-Host $item
}

# Mark the end execution time
$endTime = Get-Date

# Calculate duration
$duration = $endTime - $startTime
Write-Host "⏱️ Script execution time: $($duration.TotalSeconds) seconds"
Stop-Transcript

Extract models from CHE package #

#param(
#    [Parameter(Mandatory = $true)]
#    [string]$ZipFilePath,
#
#    [Parameter(Mandatory = $true)]
#    [string]$TargetPath
#)

$ZipFilePath = "D:\Downloads\AOTBrowser_DeployablePackage_863.0.5.15.zip"
#$ZipFilePath = "D:\Repository\Play\PCM-10.0.38.62.24.09.04.HFX9.zip"


if (Test-Path -Path $ZipFilePath) {
    Write-Output "$ZipFilePath Source fhe file exists."
} else {
    Write-Output "$ZipFilePath The file does not exist."
}

$fileNameWithoutExtension = [System.IO.Path]::GetFileNameWithoutExtension($ZipFilePath)
$TmpTargetPath = [System.IO.Path]::GetDirectoryName($ZipFilePath)
$TargetPath = "$TmpTargetPath\$fileNameWithoutExtension"

if (-Not (Test-Path -Path $TargetPath)) {
    New-Item -Path $TargetPath -ItemType Directory
    Write-Output "Folder was created."
} else {
    Write-Output "Folder already exists."
}

# Extract files from zip archive
Expand-Archive -Path $ZipFilePath -DestinationPath $TargetPath -Force

Get-ChildItem -Path "$TargetPath\AOSService\Packages" -Filter "*nupkg" | ForEach-Object {
    $newName = $_.BaseName + ".zip"
    Rename-Item -Path $_.FullName -NewName $newName
}

# Get list of .nupkg files
$NupkgFiles = Get-ChildItem -Path "$TargetPath\AOSService\Packages\*.zip"

# Array for storing created folders
$CreatedFolders = @()

foreach ($NupkgFile in $NupkgFiles) {
    # Get filename without extension
    $FileName = [System.IO.Path]::GetFileNameWithoutExtension($NupkgFile.FullName)

    # Get corresponding zip file
    $ZipFile = Get-ChildItem -Path "$TargetPath\AOSService\Packages\files\$FileName.zip"

    # Extract nupkg file to temporary folder
    $TempPath = [System.IO.Path]::GetTempPath() + "vvss\$FileName\"

    
    Expand-Archive -Path $NupkgFile.FullName -DestinationPath $TempPath -Force 
    cd "$TempPath"
    $NuspecFile = Get-ChildItem -Filter "*.nuspec"

    # Get folder name from nuspec file
    $Xml = [xml](Get-Content $NuspecFile.FullName)
    $FolderName = $Xml.package.metadata.summary

    if ([string]::IsNullOrEmpty($FolderName)) {
        $FolderName = $FileName
    }

    # Create destination folder
    $DestinationPath = Join-Path -Path $TargetPath -ChildPath $FolderName
    New-Item -ItemType Directory -Path $DestinationPath -Force

    # Add created folder to array
    $CreatedFolders += $DestinationPath

    # Extract zip file to destination folder
    Expand-Archive -Path $ZipFile.FullName -DestinationPath $DestinationPath -Force

    # Remove temporary files
    Remove-Item -Path $NuspecFile.FullName -Force
    Remove-Item -Path "$TempPath\*" -Recurse -Force
}

cd $TargetPath
# Clean destination folder


foreach ($Folder in $CreatedFolders) {

    if ((Get-ChildItem -Path "$Folder\AdditionalFiles").Count -eq 0) {
        Remove-Item -Path "$Folder\AdditionalFiles" -Force
    }
    else {
        Write-Warning "Folder '$("$Folder\AdditionalFiles")' is not empty and will not be deleted."
    }
}


$ExistingFolders = Get-ChildItem -Path $TargetPath | Where-Object {$_.FullName -notin $CreatedFolders}


foreach ($Folder in $ExistingFolders) {
    Remove-Item -Path $Folder.FullName -Force -Recurse
}

$TempPath = [System.IO.Path]::GetTempPath() + "vvss\"
Remove-Item -Path $TempPath -Force -Recurse

Merge 2 LCS packages to one #

File name: Merge-LCSPackages.ps1

# Merge-LCSPackages.ps1
param (
    [string]$Package1Zip = "D:\Downloads\SCT\1006357\SCriptTest\PCM-10.0.42.66.25.09.01_orig.zip",
    [string]$Package2Zip = "D:\Downloads\SCT\1006357\SCriptTest\ALOPS-10.0.42.66.25.09.01_orig.zip",
    [string]$OutputZip = "D:\Downloads\SCT\1006357\SCriptTest\PCM-10.0.42.66.25.09.01_AllInOne.zip",
    [string]$WorkingDir = "C:\Temp\LCSPackageMerge"
)

function Log {
    param ([string]$Message)
    $timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
    Write-Host "$timestamp - $Message"
}

$stopwatch = [System.Diagnostics.Stopwatch]::StartNew()
Log "🔄 Starting LCS package merge process"

# Setup working directories
$commonDir = Join-Path $WorkingDir "CommonPackage"
$temp2Dir = Join-Path $WorkingDir "TempPackage2"
Remove-Item $WorkingDir -Recurse -Force -ErrorAction SilentlyContinue
New-Item -ItemType Directory -Path $commonDir, $temp2Dir | Out-Null

# Step 1: Extract Package1
Log "📦 Extracting Package1 to CommonPackage"
Expand-Archive -Path $Package1Zip -DestinationPath $commonDir

# Step 2: Extract Package2
Log "📦 Extracting Package2 to TempPackage2"
Expand-Archive -Path $Package2Zip -DestinationPath $temp2Dir

# Step 3: Copy AOSService from Package2
$sourceAOS = Join-Path $temp2Dir "AOSService"
$targetAOS = Join-Path $commonDir "AOSService"
Log "📁 Merging AOSService content from Package2"
Copy-Item -Path "$sourceAOS\*" -Destination $targetAOS -Recurse -Force

# Step 4: Merge HotfixInstallationInfo.xml
$xmlPath1 = Join-Path $commonDir "HotfixInstallationInfo.xml"
$xmlPath2 = Join-Path $temp2Dir "HotfixInstallationInfo.xml"

if ((Test-Path $xmlPath1) -and (Test-Path $xmlPath2)) {
    Log "📝 Merging HotfixInstallationInfo.xml files from root of each package"
    [xml]$xml1 = Get-Content $xmlPath1
    [xml]$xml2 = Get-Content $xmlPath2

    # Append all MetadataModuleList entries from xml2 to xml1
    foreach ($mod in $xml2.HotfixInstallationInfo.MetadataModuleList.string) {
        $node = $xml1.CreateElement("string")
        $node.InnerText = $mod
        $xml1.HotfixInstallationInfo.MetadataModuleList.AppendChild($node) | Out-Null
    }

    # Append all AllComponentList entries from xml2 to xml1
    foreach ($comp in $xml2.HotfixInstallationInfo.AllComponentList.ArrayOfString) {
        $imported = $xml1.ImportNode($comp, $true)
        $xml1.HotfixInstallationInfo.AllComponentList.AppendChild($imported) | Out-Null
    }

    $xml1.Save($xmlPath1)
    Log "✅ HotfixInstallationInfo.xml merged successfully (root-level)"
} else {
    Log "⚠️ One or both HotfixInstallationInfo.xml files not found at root. Skipping merge."
}


# Step 5: Repackage CommonPackage
Log "📦 Creating final merged ZIP package"
if (Test-Path $OutputZip) { Remove-Item $OutputZip -Force }
Compress-Archive -Path "$commonDir\*" -DestinationPath $OutputZip -Force

$stopwatch.Stop()
Log "✅ Merge completed in $($stopwatch.Elapsed.TotalSeconds) seconds"
Log "📁 Final package saved to: $OutputZip"
Clear Dual-write and Business events configuration #

DELETE FROM DualWriteProjectConfiguration;
DELETE FROM DualWriteProjectFieldConfiguration;
DELETE FROM BusinessEventsDefinition;