DMF ImportFromPackage API - Automate Package Creation
As we know, the D365 package import API requires your source file content along with two XML files, PackageHeader.xml and Manifest.xml. These must all be packaged into a zip archive, and you generally pass a SAS link to that archive when calling the import API:
POST /data/DataManagementDefinitionGroups/Microsoft.Dynamics.DataEntities.ImportFromPackage
BODY
{
"packageUrl":"<string>",
"definitionGroupId":"<string>",
"executionId":"<string>",
"execute":<bool>,
"overwrite":<bool>,
"legalEntityId":"<string>"
}
If we know the source file has the correct headers (CSV in this example), it's possible to automatically generate these files from the source content file.
Deploy an Azure Function with a Blob Trigger using an associated container structure:
LegalEntity would be a real entity in your target instance.
Customer groups is your Entity Name
Function main body
[Function(nameof(ProcessBlobFile))]
public async Task Run([BlobTrigger("dynamics/{name}", Connection = "AzureWebJobsStorage")] Stream stream, string name)
{
    // One GUID correlates every artifact for this import: the blob folder,
    // the DMF definition group id, and the execution id all reuse it.
    string packageID = Guid.NewGuid().ToString();
    try
    {
        _logger.LogInformation("Function triggered by blob: {name}", name);

        // Read the whole CSV into memory; the underlying blob stream is kept
        // for the later SaveBlobAsync upload (it is rewound there).
        using var blobStreamReader = new StreamReader(stream);
        var content = await blobStreamReader.ReadToEndAsync();
        _logger.LogInformation("Blob content read successfully. Size: {size} Bytes", stream.Length);

        // Expected blob path layout: {Legalentity}/{EntityName}/{fileName}
        // (relative to the "dynamics" container).
        var pathParts = name.Split('/');
        if (pathParts.Length < 3)
        {
            // Braces doubled so the structured-logging formatter does not
            // treat them as placeholders with missing arguments.
            _logger.LogError("Invalid path format. Expected format: dynamics/{{Legalentity}}/{{EntityName}}/{{fileName}}");
            return;
        }
        var legalEntity = pathParts[pathParts.Length - 3];
        var entityName = pathParts[pathParts.Length - 2];
        var fileName = pathParts[pathParts.Length - 1];
        _logger.LogInformation("Extracted path parts - Legalentity: {legalEntity}, EntityName: {entityName}, FileName: {fileName}", legalEntity, entityName, fileName);

        // The first CSV line must be the header row; guard against an empty
        // blob (ReadLine() returns null, which previously threw an NRE).
        string[] headers;
        using (var reader = new StringReader(content))
        {
            var headerLine = reader.ReadLine();
            if (string.IsNullOrWhiteSpace(headerLine))
            {
                _logger.LogError("Blob {name} is empty or has no CSV header row.", name);
                return;
            }
            headers = headerLine.Split(',');
        }
        _logger.LogInformation("CSV headers extracted: {headers}", string.Join(", ", headers));

        // Every step below is a hard prerequisite for the import call, so a
        // failed step aborts the run instead of importing a broken package.
        if (!await ProcessManifest.GenerateManifestAsync(entityName, fileName, headers.ToList(), _logger, packageID))
        {
            _logger.LogError("Failed to generate the manifest.");
            return;
        }
        _logger.LogInformation("Manifest generated successfully.");

        if (!await ProcessPackage.GeneratePackageHeaderAsync(legalEntity, _logger, packageID))
        {
            _logger.LogError("Failed to generate the package header.");
            return;
        }
        _logger.LogInformation("Package header generated successfully.");

        if (!await SaveOriginalBlob.SaveBlobAsync(stream, packageID, fileName, _logger))
        {
            _logger.LogError("Failed to save the original blob.");
            return;
        }
        _logger.LogInformation("Original blob saved successfully.");

        if (!await ZipFolderContents.ZipAndSaveFolderAsync(packageID, _logger))
        {
            _logger.LogError("Failed to zip and save the folder contents.");
            return;
        }
        _logger.LogInformation("Folder contents zipped and saved successfully.");

        // packageID doubles as both definitionGroupId and executionId.
        bool importResult = await D365FinanceImport.ImportPackageAsync(packageID, packageID, packageID, true, true, legalEntity, _logger);
        if (importResult)
        {
            // Was Console.WriteLine — in a Functions worker that bypasses the
            // configured logging pipeline; route through ILogger instead.
            _logger.LogInformation("Package imported successfully.");
        }
        else
        {
            _logger.LogError("Failed to import package.");
        }
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "An error occurred while processing the blob. Exception: {exceptionMessage}", ex.Message);
        // Rethrow so the Functions host records the failure and can retry.
        throw;
    }
}
Split the path of the name to extract your legal entity, entity name and filename
Generate your Manifest using the headers from the input file:
public static class ProcessManifest
{
    /// <summary>
    /// Builds the DMF Manifest.xml for a single-entity package from the CSV
    /// header row and uploads it to dynamicspackages/{packageID}/manifest.xml.
    /// </summary>
    /// <param name="entityName">D365 data entity name written to EntityName.</param>
    /// <param name="fileName">Source file name written to InputFilePath.</param>
    /// <param name="headers">CSV header columns; each becomes an EntityMap entry.</param>
    /// <param name="logger">Logger for progress/error output.</param>
    /// <param name="packageID">Package GUID; used as the DefinitionGroupName and blob folder.</param>
    /// <returns>true on success; false on any failure (exception is logged, not rethrown).</returns>
    public static async Task<bool> GenerateManifestAsync(string entityName, string fileName, List<string> headers, ILogger logger, string packageID)
    {
        try
        {
            XNamespace ns = "http://schemas.microsoft.com/dynamics/2015/01/DataManagement";
            XNamespace xsi = "http://www.w3.org/2001/XMLSchema-instance";

            // One EntityMap per CSV column, mapping the (upper-cased) header
            // name onto itself as both entity field and XML field.
            var entityMapList = new XElement(ns + "EntityMapList",
                headers.Select((header, index) => new XElement(ns + "EntityMap",
                    new XElement(ns + "ArrayIndex", index),
                    new XElement(ns + "EntityField", header.ToUpper()),
                    new XElement(ns + "EntityFieldConversionList", new XAttribute(xsi + "nil", "true")),
                    new XElement(ns + "IsAutoDefault", "false"),
                    new XElement(ns + "IsAutoGenerated", "false"),
                    new XElement(ns + "IsDefaultValueEqualNull", "false"),
                    new XElement(ns + "UseTextQualifier", "false"),
                    new XElement(ns + "XMLField", header.ToUpper())
                ))
            );
            // Pass the element as a template argument (original logged the
            // literal placeholder text because the argument was missing).
            logger.LogInformation("EntityMapList created: {entityMapList}", entityMapList);

            var dataManagementPackageManifest = new XElement(ns + "DataManagementPackageManifest",
                new XAttribute(XNamespace.Xmlns + "i", xsi)
            );

            var definitionGroupName = new XElement(ns + "DefinitionGroupName", packageID);
            var description = new XElement(ns + "Description", "DevTest");
            var packageEntityList = new XElement(ns + "PackageEntityList");

            var dataManagementPackageEntityData = new XElement(ns + "DataManagementPackageEntityData",
                new XElement(ns + "DefaultRefreshType", "IncrementalPush"),
                new XElement(ns + "Disable", "false"),
                entityMapList,
                new XElement(ns + "EntityName", entityName),
                new XElement(ns + "EntityTransformList", new XAttribute(xsi + "nil", "true")),
                new XElement(ns + "ExcelSheetName"),
                new XElement(ns + "ExecutionUnit", "1"),
                new XElement(ns + "FailExecutionUnitOnError", "false"),
                new XElement(ns + "FailLevelOnError", "false"),
                new XElement(ns + "InputFilePath", fileName),
                new XElement(ns + "LevelInExecutionUnit", "1"),
                // NOTE(review): QueryData is a serialized query blob captured
                // from a Customer groups export — it is entity-specific and
                // must be regenerated if this is used for other entities.
                new XElement(ns + "QueryData", "4a012f270000110001e649010000000a4de9030000862b00008c2b0000882b00008b2b000000008404430075007300740043007500730074006f006d0065007200470072006f007500700045006e0074006900740079000000110001e8033400430075007300740043007500730074006f006d0065007200470072006f007500700045006e0074006900740079005f0031000000e2093000430075007300740043007500730074006f006d0065007200470072006f007500700045006e0074006900740079000000094de8030000f319000000920402001100010000ffffffffffffffff9b04ffff9a04ffff00000000000001ffffffff009005000000000000000000000000000000000000000000000000000000000000000000"),
                new XElement(ns + "QueryFilter", new XAttribute(xsi + "nil", "true")),
                new XElement(ns + "RunBusinessLogic", "true"),
                new XElement(ns + "RunBusinessValidation", "true"),
                new XElement(ns + "SampleFilePath", "IAIVOIN9E"),
                new XElement(ns + "SequenceInLevel", "1"),
                new XElement(ns + "SourceFormat", "CSV"),
                // NOTE(review): TargetEntity is hard-coded for the Customer
                // groups walkthrough; like QueryData it does not follow the
                // entityName parameter — confirm before reusing elsewhere.
                new XElement(ns + "TargetEntity", "CustCustomerGroupEntity"),
                new XElement(ns + "ValidationStatus", "Yes")
            );

            packageEntityList.Add(dataManagementPackageEntityData);
            dataManagementPackageManifest.Add(definitionGroupName);
            dataManagementPackageManifest.Add(description);
            dataManagementPackageManifest.Add(packageEntityList);
            dataManagementPackageManifest.Add(new XElement(ns + "ProjectCategory", "0"));
            dataManagementPackageManifest.Add(new XElement(ns + "RulesData", new XAttribute(xsi + "nil", "true")));

            var xmlContent = new XDocument(dataManagementPackageManifest);
            // Supply the string as a template argument (placeholder was
            // previously logged verbatim with no value).
            string xmlString = xmlContent.ToString(SaveOptions.DisableFormatting);
            logger.LogInformation("Generated XML content:\n{xmlString}", xmlString);

            // Upload manifest.xml into the package's folder in the
            // dynamicspackages container.
            var connectionString = Environment.GetEnvironmentVariable("AzureWebJobsStorage");
            var blobServiceClient = new BlobServiceClient(connectionString);
            var containerClient = blobServiceClient.GetBlobContainerClient("dynamicspackages");
            var blobClient = containerClient.GetBlobClient($"{packageID}/manifest.xml");
            using (var outputBlob = new MemoryStream())
            {
                // leaveOpen: true so the MemoryStream survives the writer's
                // dispose (which flushes the XML) for the upload below.
                using (var writer = new StreamWriter(outputBlob, Encoding.UTF8, 1024, true))
                {
                    xmlContent.Save(writer, SaveOptions.DisableFormatting);
                }
                outputBlob.Position = 0;
                await blobClient.UploadAsync(outputBlob, overwrite: true);
            }
            logger.LogInformation("XML file created and uploaded successfully to dynamicspackages/{packageID} as manifest.xml.", packageID);
            return true;
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "An error occurred while generating the manifest.");
            return false;
        }
    }
}
Generate the PackageHeader.xml
/// <summary>
/// Builds the DMF PackageHeader.xml and uploads it to
/// dynamicspackages/{packageID}/PackageHeader.xml.
/// </summary>
/// <param name="description">Text for the header's Description element (the caller passes the legal entity).</param>
/// <param name="logger">Logger for progress/error output.</param>
/// <param name="packageID">Package GUID; names the destination blob folder.</param>
/// <returns>true on success; false on any failure (exception is logged, not rethrown).</returns>
public static async Task<bool> GeneratePackageHeaderAsync(string description, ILogger logger, string packageID)
{
    try
    {
        XNamespace ns = "http://schemas.microsoft.com/dynamics/2015/01/DataManagement";
        XNamespace xsi = "http://www.w3.org/2001/XMLSchema-instance";
        // BUGFIX: the description parameter was previously ignored and the
        // Description element was filled with packageID instead.
        var packageHeader = new XElement(ns + "DataManagementPackageHeader",
            new XAttribute(XNamespace.Xmlns + "i", xsi),
            new XElement(ns + "Description", description),
            new XElement(ns + "ManifestType", "Microsoft.Dynamics.AX.Framework.Tools.DataManagement.Serialization.DataManagementPackageManifest"),
            new XElement(ns + "PackageType", "DefinitionGroup"),
            new XElement(ns + "PackageVersion", "2")
        );
        // Pass the element as a template argument (the placeholder was
        // previously logged verbatim because the argument was missing).
        logger.LogInformation("DataManagementPackageHeader created: {packageHeader}", packageHeader);

        // XML document with an explicit UTF-8 declaration, as DMF expects.
        var xmlContent = new XDocument(
            new XDeclaration("1.0", "utf-8", null),
            packageHeader);
        string xmlString = xmlContent.ToString(SaveOptions.DisableFormatting);
        logger.LogInformation("Generated XML content:\n{xmlString}", xmlString);

        // Upload PackageHeader.xml into the package's folder.
        var connectionString = Environment.GetEnvironmentVariable("AzureWebJobsStorage");
        var blobServiceClient = new BlobServiceClient(connectionString);
        var containerClient = blobServiceClient.GetBlobContainerClient("dynamicspackages");
        var blobClient = containerClient.GetBlobClient($"{packageID}/PackageHeader.xml");
        using (var outputBlob = new MemoryStream())
        {
            // leaveOpen: true keeps the MemoryStream usable after the writer
            // is disposed (the dispose flushes the XML into the stream).
            using (var writer = new StreamWriter(outputBlob, Encoding.UTF8, 1024, true))
            {
                xmlContent.Save(writer, SaveOptions.DisableFormatting);
            }
            outputBlob.Position = 0;
            await blobClient.UploadAsync(outputBlob, overwrite: true);
        }
        logger.LogInformation("XML file created and uploaded successfully to dynamicspackages/{packageID} as PackageHeader.xml.", packageID);
        return true;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "An error occurred while generating the package header.");
        return false;
    }
}
Move your original Blob and create a zip file of the contents:
/// <summary>
/// Copies the original source file into the package folder
/// (dynamicspackages/{packageID}/{fileName}) so it can be zipped with the
/// generated manifest and package header.
/// </summary>
/// <param name="originalBlobStream">Stream of the triggering blob; rewound before upload if seekable.</param>
/// <param name="packageID">Package GUID naming the destination folder.</param>
/// <param name="fileName">Destination blob name inside the folder.</param>
/// <param name="logger">Logger for progress/error output.</param>
/// <returns>true on success; false on any failure (exception is logged, not rethrown).</returns>
public static async Task<bool> SaveBlobAsync(Stream originalBlobStream, string packageID, string fileName, ILogger logger)
{
    try
    {
        var connectionString = Environment.GetEnvironmentVariable("AzureWebJobsStorage");
        var blobServiceClient = new BlobServiceClient(connectionString);
        var containerClient = blobServiceClient.GetBlobContainerClient("dynamicspackages");
        var blobClient = containerClient.GetBlobClient($"{packageID}/{fileName}");
        // The trigger stream was already consumed by the CSV read; rewind it.
        // Guarded because Position setter throws on non-seekable streams.
        if (originalBlobStream.CanSeek)
        {
            originalBlobStream.Position = 0;
        }
        await blobClient.UploadAsync(originalBlobStream, overwrite: true);
        // BUGFIX: message previously claimed "output-container"; the actual
        // destination is the dynamicspackages container.
        logger.LogInformation("Original blob saved successfully to dynamicspackages/{packageID} as {fileName}.", packageID, fileName);
        return true;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "An error occurred while saving the original blob.");
        return false;
    }
}
/// <summary>
/// Downloads every blob under {sourceFolder}/ in the dynamicspackages
/// container, zips them, and uploads the archive as
/// {sourceFolder}/{sourceFolder}.zip.
/// </summary>
/// <param name="sourceFolder">Blob prefix (the package GUID) whose contents are zipped.</param>
/// <param name="logger">Logger for progress/error output.</param>
/// <returns>true on success; false on any failure (exception is logged, not rethrown).</returns>
public static async Task<bool> ZipAndSaveFolderAsync(string sourceFolder, ILogger logger)
{
    // Scratch paths created up front so the finally block can clean them up
    // even when an intermediate step throws (previously the temp directory
    // and zip leaked on failure).
    string tempDirectory = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
    string zipFilePath = Path.Combine(Path.GetTempPath(), $"{Guid.NewGuid()}.zip"); // was missing the .zip extension
    try
    {
        var connectionString = Environment.GetEnvironmentVariable("AzureWebJobsStorage");
        var blobServiceClient = new BlobServiceClient(connectionString);
        var containerClient = blobServiceClient.GetBlobContainerClient("dynamicspackages");
        Directory.CreateDirectory(tempDirectory);

        // Download everything in the package folder to the scratch directory.
        await foreach (var blobItem in containerClient.GetBlobsAsync(prefix: sourceFolder))
        {
            // Skip any archive left by a previous attempt so a retried run
            // never nests the old zip inside the new one.
            if (blobItem.Name.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }
            var blobClient = containerClient.GetBlobClient(blobItem.Name);
            string localFilePath = Path.Combine(tempDirectory, Path.GetFileName(blobItem.Name));
            await blobClient.DownloadToAsync(localFilePath);
        }

        ZipFile.CreateFromDirectory(tempDirectory, zipFilePath);

        // Upload the archive back into the same folder.
        var zipBlobClient = containerClient.GetBlobClient($"{sourceFolder}/{sourceFolder}.zip");
        using (var zipFileStream = File.OpenRead(zipFilePath))
        {
            await zipBlobClient.UploadAsync(zipFileStream, overwrite: true);
        }

        logger.LogInformation("Folder {sourceFolder} zipped and saved as {sourceFolder}.zip successfully.", sourceFolder, sourceFolder);
        return true;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "An error occurred while zipping and saving the folder contents.");
        return false;
    }
    finally
    {
        // Best-effort cleanup of local scratch files; a cleanup failure must
        // not mask the real outcome.
        try
        {
            if (Directory.Exists(tempDirectory))
            {
                Directory.Delete(tempDirectory, true);
            }
            if (File.Exists(zipFilePath))
            {
                File.Delete(zipFilePath);
            }
        }
        catch (IOException cleanupEx)
        {
            logger.LogWarning(cleanupEx, "Failed to clean up temporary files for {sourceFolder}.", sourceFolder);
        }
    }
}
Now execute the import API
public static class D365FinanceImport
{
    // One HttpClient for the process lifetime — per-call instances exhaust
    // sockets under load. Auth is attached per request (below) rather than
    // via DefaultRequestHeaders, which would race on a shared client.
    private static readonly HttpClient _httpClient = new HttpClient();

    /// <summary>
    /// Generates a read-only SAS URL for the package zip and calls the D365
    /// Finance ImportFromPackage API with it.
    /// </summary>
    /// <param name="folderPath">Blob folder (package GUID) containing {folderPath}.zip.</param>
    /// <param name="definitionGroupId">DMF definition group id to create/use.</param>
    /// <param name="executionId">Execution id for the import run.</param>
    /// <param name="execute">Whether D365 should execute the import immediately.</param>
    /// <param name="overwrite">Whether to overwrite an existing definition group.</param>
    /// <param name="legalEntityId">Target legal entity (company).</param>
    /// <param name="logger">Logger for progress/error output.</param>
    /// <returns>true when the API call succeeds; false otherwise.</returns>
    public static async Task<bool> ImportPackageAsync(string folderPath, string definitionGroupId, string executionId, bool execute, bool overwrite, string legalEntityId, ILogger logger)
    {
        try
        {
            var connectionString = Environment.GetEnvironmentVariable("AzureWebJobsStorage");
            var blobServiceClient = new BlobServiceClient(connectionString);
            var containerClient = blobServiceClient.GetBlobContainerClient("dynamicspackages");
            var blobClient = containerClient.GetBlobClient($"{folderPath}/{folderPath}.zip");

            // Short-lived read-only SAS so D365 can fetch the zip.
            var sasUrl = GenerateSasUrl(blobClient);
            logger.LogInformation("SAS URL: {sasUrl}", sasUrl);

            // D365 Finance connection details from app settings.
            var scmURL = Environment.GetEnvironmentVariable("scmURL");
            var client_id = Environment.GetEnvironmentVariable("client_id");
            var client_secret = Environment.GetEnvironmentVariable("client_secret");
            var directory = Environment.GetEnvironmentVariable("directory");

            var token = await GetOAuthTokenAsync(client_id, client_secret, directory, scmURL, logger);
            if (string.IsNullOrEmpty(token))
            {
                logger.LogError("Failed to obtain OAuth token.");
                return false;
            }

            // Serialize the body instead of hand-building JSON: the SAS URL
            // and ids are then correctly escaped regardless of content.
            var payload = JsonSerializer.Serialize(new
            {
                packageUrl = sasUrl,
                definitionGroupId,
                executionId,
                execute,
                overwrite,
                legalEntityId
            });

            using (var request = new HttpRequestMessage(HttpMethod.Post, $"{scmURL}/data/DataManagementDefinitionGroups/Microsoft.Dynamics.DataEntities.ImportFromPackage"))
            {
                request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
                request.Content = new StringContent(payload, Encoding.UTF8, "application/json");

                var response = await _httpClient.SendAsync(request);
                if (response.IsSuccessStatusCode)
                {
                    logger.LogInformation("Package imported successfully.");
                    return true;
                }

                var responseContent = await response.Content.ReadAsStringAsync();
                logger.LogError("Failed to import package. Status code: {statusCode}, Response: {responseContent}", response.StatusCode, responseContent);
                return false;
            }
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "An error occurred while importing the package.");
            return false;
        }
    }

    // Builds a one-hour, read-only blob SAS using the client's credentials.
    private static string GenerateSasUrl(BlobClient blobClient)
    {
        var sasBuilder = new BlobSasBuilder
        {
            BlobContainerName = blobClient.BlobContainerName,
            BlobName = blobClient.Name,
            Resource = "b",
            ExpiresOn = DateTimeOffset.UtcNow.AddHours(1) // Set the expiration time as needed
        };
        sasBuilder.SetPermissions(BlobSasPermissions.Read);
        var sasUri = blobClient.GenerateSasUri(sasBuilder);
        return sasUri.ToString();
    }

    // Client-credentials flow against the Azure AD v1 token endpoint;
    // 'resource' is the D365 environment URL. Returns null on failure.
    private static async Task<string> GetOAuthTokenAsync(string clientId, string clientSecret, string directory, string resource, ILogger logger)
    {
        try
        {
            var url = $"https://login.microsoftonline.com/{directory}/oauth2/token";
            var body = new List<KeyValuePair<string, string>>
            {
                new KeyValuePair<string, string>("grant_type", "client_credentials"),
                new KeyValuePair<string, string>("client_id", clientId),
                new KeyValuePair<string, string>("client_secret", clientSecret),
                new KeyValuePair<string, string>("resource", resource)
            };
            var content = new FormUrlEncodedContent(body);
            var response = await _httpClient.PostAsync(url, content);
            var responseContent = await response.Content.ReadAsStringAsync();
            if (response.IsSuccessStatusCode)
            {
                var json = JsonDocument.Parse(responseContent);
                return json.RootElement.GetProperty("access_token").GetString();
            }

            logger.LogError("Failed to obtain OAuth token. Status code: {statusCode}, Response: {responseContent}", response.StatusCode, responseContent);
            return null;
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "An error occurred while obtaining the OAuth token.");
            return null;
        }
    }
}
Drop your file into your container and wait for the function to process it:
With a different file for Vendor groups:
This is by no means a substitute for the standard functionality, but it's nice to know it can be done.