Azure Integration: Generate a Flat file with ANSI encoding using Logic App

Handling non-UTF-8 encoding in Logic Apps

Recently there was a requirement to generate a flat file in an Azure Logic App and deliver it to an Azure File Share with ANSI encoding, as the target application could only process ANSI-encoded files. Most cloud services assume that a text payload is in some form of Unicode (UTF) encoding; Azure Logic Apps assumes UTF-8. When your text payload is in a different encoding, such as a code-page-based encoding, the non-basic Latin characters get mangled. This is particularly common with flat files, because they typically integrate with legacy systems that were not written with Unicode support.

My approach to solving the problem was to create an Azure Function App that converts the encoding from UTF-8 to windows-1252 (or any other encoding) and then stores the file content in Azure File storage.

Azure Integration Logic APP ANSI Encoding using Azure Function App

This seemed like an easy fix, but the main problem was that the Azure Logic App did not like the output from the Azure Function App and threw the exception shown below:

BadRequest. Http request failed as the content was not valid: ‘Unable to translate bytes [E4] at index 83 from specified code page to Unicode.’.

The solution is to use Base64 encoding. Base64 encoding ensures that none of the services in the Azure integration will assume a UTF encoding. Once you convert a non-UTF flat file (such as windows-1252) to UTF-8, you can base64-decode it safely and process it with the Flat File Decode action.
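
In practice this means base64-encoding the payload on the Logic App side before calling the conversion function, and decoding the function's response before writing the file. A minimal sketch using Logic Apps workflow-language expressions (the action names are hypothetical):

Encode before calling the function app:
@{base64(body('Get_flat_file_content'))}

Decode the converted result before writing it to Azure Files:
@{base64ToBinary(body('ChangeBase64Encoding')?['text'])}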

Azure Function App to change the encoding

The following Azure Function App can be used to convert the encoding of base64-encoded text:

using System;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Host;
using Newtonsoft.Json;

namespace PnJ.FunctionApp.ConvertEncoding
{
    public static class ChangeBase64Encoding
    {
        [FunctionName("ChangeBase64Encoding")]
        public static async Task<object> Run([HttpTrigger(WebHookType = "genericJson")]HttpRequestMessage req, TraceWriter log)
        {
            log.Info($"Change base 64 Encoding function App was triggered");

            Encoding inputEncoding = null;

            string jsonContent = await req.Content.ReadAsStringAsync();
            dynamic data = JsonConvert.DeserializeObject(jsonContent);

            if (data == null || data.text == null || data.encodingInput == null || data.encodingOutput == null)
            {
                return req.CreateResponse(HttpStatusCode.BadRequest, new
                {
                    error = "Please pass text/encodingOutput properties in the input Json object."
                });
            }

            try
            {
                string encodingInput = data.encodingInput;
                inputEncoding = Encoding.GetEncoding(name: encodingInput);
            }
            catch (ArgumentException)
            {
                return req.CreateResponse(HttpStatusCode.BadRequest, new
                {
                    error = "Input char set value '" + data.encodingInput + "' is not supported. Supported value are listed at https://msdn.microsoft.com/en-us/library/system.text.encoding(v=vs.110).aspx."
                });
            }

            Encoding encodingOutput;
            try
            {
                string outputEncoding = data.encodingOutput;
                encodingOutput = Encoding.GetEncoding(outputEncoding);
            }
            catch (ArgumentException)
            {
                return req.CreateResponse(HttpStatusCode.BadRequest, new
                {
                    error = "Output char set value '" + data.encodingOutput + "' is not supported. Supported value are listed at https://msdn.microsoft.com/en-us/library/system.text.encoding(v=vs.110).aspx."
                });
            }

            string input = data.text;
            // Decode the base64 payload, convert the bytes between encodings, then re-encode to base64 below.
            var outputBytes = Encoding.Convert(srcEncoding: inputEncoding, dstEncoding: encodingOutput, bytes: Convert.FromBase64String(input));

            var response = req.CreateResponse(HttpStatusCode.OK);
            response.Content = new StringContent(content: JsonConvert.SerializeObject(new
            {
                text = Convert.ToBase64String(outputBytes)
            }).ToString(), encoding: encodingOutput, mediaType: "application/json");

            return response;
        }
    }
}

The function app receives the following input, converts the text to the desired encoding, and sends it back:

{
  "encodingInput": "utf-8",
  "encodingOutput": "windows-1252",
  "text": "U0hQMDAwMDExMzZ8VHN1YmFraSBFdXJvcGUgQi5WLnxBdmVudHVyaWpufDMzMTYgTEJ8RG9yZHJlY2h0fE5MfDE4NDMwOHxSdXRoZW5iZXJnIExhbmR0ZWNobmlrfENhcmwtQm9yZ3dhcmQtU3RyIDF8R8O8dGVyc2xvaHwzMzMzNXxERXwyMDIwMDQyOXw0OE58U3xCb3ggM3xDaGFpbiBhbmQgcGFydHN8M3wwLjAyfA=="
}
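
For reference, the text property above decodes to a single pipe-delimited flat-file record; note the non-ASCII character in Gütersloh, which is exactly the kind of character that gets mangled when the wrong encoding is assumed:

SHP00001136|Tsubaki Europe B.V.|Aventurijn|3316 LB|Dordrecht|NL|184308|Ruthenberg Landtechnik|Carl-Borgward-Str 1|Gütersloh|33335|DE|20200429|48N|S|Box 3|Chain and parts|3|0.02|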

If the above approach doesn't work, use the following approach, which gave me the desired results.

using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.Http;
using Microsoft.Azure.WebJobs.Host;
using Newtonsoft.Json;

namespace PnJ.FunctionApp.ConvertEncoding
{
    public static class ChangeEncoding
    {
        [FunctionName("ChangeEncoding")]
        public static async Task<HttpResponseMessage> Run([HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)]HttpRequestMessage req, TraceWriter log)
        {
            log.Info($"Change Encoding function App was triggered was triggered!");

            Encoding inputEncoding = null;

            string jsonContent = await req.Content.ReadAsStringAsync();
            dynamic data = JsonConvert.DeserializeObject(jsonContent);

            if (data == null || data.text == null || data.encodingInput == null || data.encodingOutput == null)
            {
                return req.CreateResponse(HttpStatusCode.BadRequest, new
                {
                    error = "Please pass text/encodingOutput properties in the input Json object."
                });
            }
            try
            {
                string encodingInput = data.encodingInput;
                inputEncoding = Encoding.GetEncoding(name: encodingInput);
            }
            catch (ArgumentException)
            {
                return req.CreateResponse(HttpStatusCode.BadRequest, new
                {
                    error = "Input char set value '" + data.encodingInput + "' is not supported. Supported value are listed at https://msdn.microsoft.com/en-us/library/system.text.encoding(v=vs.110).aspx."
                });
            }

            Encoding encodingOutput = null;
            try
            {
                string outputEncoding = data.encodingOutput;
                encodingOutput = Encoding.GetEncoding(outputEncoding);
            }
            catch (ArgumentException)
            {
                return req.CreateResponse(HttpStatusCode.BadRequest, new
                {
                    error = "Output char set value '" + data.encodingOutput + "' is not supported. Supported value are listed at https://msdn.microsoft.com/en-us/library/system.text.encoding(v=vs.110).aspx."
                });
            }

            string input = data.text;
            // The "text" property carries the payload as base64 (see the sample request below), so decode it before converting.
            var outputBytes = Encoding.Convert(srcEncoding: inputEncoding, dstEncoding: encodingOutput, bytes: Convert.FromBase64String(input));
            var response = req.CreateResponse(HttpStatusCode.OK);
            MemoryStream ms = new MemoryStream(outputBytes);
            response.Content = new StreamContent(ms);
            response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
            return response;
        }
    }
}
The function receives the same request payload as before:

{
  "encodingInput": "utf-8",
  "encodingOutput": "windows-1252",
  "text": "U0hQMDAwMDExMzZ8VHN1YmFraSBFdXJvcGUgQi5WLnxBdmVudHVyaWpufDMzMTYgTEJ8RG9yZHJlY2h0fE5MfDE4NDMwOHxSdXRoZW5iZXJnIExhbmR0ZWNobmlrfENhcmwtQm9yZ3dhcmQtU3RyIDF8R8O8dGVyc2xvaHwzMzMzNXxERXwyMDIwMDQyOXw0OE58U3xCb3ggM3xDaGFpbiBhbmQgcGFydHN8M3wwLjAyfA=="
}

I created the following Azure Logic App to test the outcome. The second approach gave me better results.

Dynamics 365 UO Integration Design Patterns

Dynamics 365 for Unified Operations has evolved into purpose-built applications that help you manage business functions. This means integrating with diverse systems. This blog describes integration patterns, integration scenarios, and best practices. There are a number of ways users can interact with D365 UO, and different ways to populate data to and retrieve data from it. In my personal opinion, the integration option can be decided based on the following three criteria:

  • Retrieve data from D365 UO or Populate data to D365 UO
  • Real-Time interaction with D365UO or Batch processing of Data
  • Amount of Data which needs to be exchanged (Data volume)

Retrieve data from D365 UO
  • Real-Time/Near Real-Time (small data volume): oData, Custom Web service, Business Events, Business Alert
  • Batch Job (large data volume): Data Management Framework, Recurring Integration

Populate data to D365 UO
  • Real-Time/Near Real-Time (small data volume): oData, Custom Web service
  • Batch Job (large data volume): Data Management Framework, Recurring Integration
Dynamics 365 UO Real-Time Integration Options
Dynamics 365 UO Bulk/Batch Processing

Dynamics 365 UO oData and REST

Dynamics 365 UO provides a REST API to interact with D365 UO via data entities. The REST API provides a mechanism for real-time or near-real-time interaction with D365 UO. OData can be used to populate, retrieve, update, and delete (CRUD) data in Dynamics 365 UO.
oData: Open Data Protocol (OData) is a standard protocol for consuming data exposed by Dynamics 365 for Operations. OData is a Representational State Transfer (REST) based protocol for CRUD operations (Create, Read, Update, Delete) that allows integration with Dynamics 365 for Operations. It builds on common web technologies such as HTTP and JavaScript Object Notation (JSON).
Data Entity: A data entity in D365 is an abstraction from the physical implementation of database tables. A data entity is a simplified, de-normalized representation of the underlying tables. A data entity represents a common data concept or functionality (e.g. Vendors V2, where the details are stored in normalized relational tables but are represented as one flat view in the data entity).
The data flow for interacting with Dynamics 365 UO using oData:

Dynamics 365 UO Integration Design Patterns: oData REST API

The technical implementation of OData with Dynamics 365 UO can be found here

Dynamics 365 UO Business Event

The Dynamics 365 UO Business Events can send events/triggers/notifications to external applications such as Azure Integration services, which can use the trigger to handle specific integration or business process scenarios.
Events in Finance and Operations were previously confined to use within Finance and Operations. The new capability provides a framework that allows business processes in Finance and Operations to capture business events as business processes execute and send the events to an external system or application.
More about business events can be found here

Business events provide a perfect integration scenario when an event occurs in D365FO and this information needs to be passed on to third-party systems.

These business events can be used by

  • Azure Service Bus
  • Azure Logic Apps
  • Microsoft Flow
  • Azure Functions
  • HTTPS Trigger

Since these events happen in the context of business processes, they are called business events that enable business process integration. External business processes will subscribe to specific business events from Finance and Operations to get notified when they occur. The business events can also be consumed as “triggers” in the Finance and Operations connector.
A custom or OOTB business event can trigger Azure Integration Services to process or forward the trigger to Third-party applications.

Dynamics 365 UO Integration Design Patterns: Business Events

Dynamics 365 UO Custom webservice

In Microsoft Dynamics 365 UO, a developer can create custom services to expose X++ functionality to external clients. Any existing X++ code can be exposed as a custom service by adding an attribute. D365 UO provides standard attributes that can be set on the data contract class and its members to automatically serialize and de-serialize data that is sent and received across a network connection. Many predefined types, such as collections and tables, are also supported. When a developer writes a custom service under a service group, the service group is always deployed on two endpoints:

  • SOAP endpoint
  • JSON endpoint

SOAP-based custom service

SOAP-based services remain the same as they were in Dynamics AX 2012.
Key changes

  • All the service groups under the AOTService group node are automatically deployed.
  • All services that must be deployed must be part of a service group.

Example endpoint for a dev environment

https://XXXX.dynamics.com/soap/services/DevServices?wsdl

JSON-based custom service

This feature enables X++ classes to be consumed as JSON services. In other words, the return data set is in JSON format. JSON, which stands for JavaScript Object Notation, is a compact, lightweight format that is commonly used to communicate data between the client and the server.

The JSON Endpoint is https://host_uri/api/services/service_group_name/service_group_service_name/operation_name.
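
As a sketch, a JSON-based custom service can be invoked with a plain HTTP POST carrying a bearer token; the service group, service, and operation names below are hypothetical, and the request body maps to the members of the X++ data contract:

using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading.Tasks;

public static class CustomServiceClient
{
    public static async Task<string> CallCustomServiceAsync(string accessToken)
    {
        using (var client = new HttpClient())
        {
            client.BaseAddress = new Uri("https://XXXX.cloudax.dynamics.com"); // D365 UO host
            client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", accessToken);

            // Hypothetical service group / service / operation; the JSON body is deserialized into the X++ data contract.
            var body = new StringContent("{ \"vendorAccount\": \"000001\" }", Encoding.UTF8, "application/json");
            HttpResponseMessage response = await client.PostAsync("/api/services/DevServices/DevVendorService/getVendorDetails", body);
            response.EnsureSuccessStatusCode();

            // The return value of the X++ operation comes back JSON-serialized.
            return await response.Content.ReadAsStringAsync();
        }
    }
}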

Bulk or Batch Data Processing

Data Management Framework

Data Management Framework: DMF is the all-in-one data management concept introduced by Microsoft in Dynamics 365 for Finance and Operations. It supports and manages all core data management tasks and enables asynchronous and high-performing data insertion and extraction scenarios. Examples include interactive file-based import/export and recurring integrations (file, queue, and so on).

Data Package: A data package is a simple .zip file that contains the source data (import) or target data (export). The zip file contains three files: the data file and two manifest files, which hold metadata about the data entity and the processing instructions for DMF.

Interacting with Dynamics 365 UO DMF REST API

In order to call the D365 F&O APIs, it is necessary to authenticate with a valid access token. The token can be retrieved from Azure Active Directory using a valid Application Id and secret key, which has access to the D365FO environment. The application ID and secret key are created by registering an application in Azure Active directory. Then the DMF REST API can be invoked.

Dynamics 365 UO Integration Design Patterns: DMF REST API
Interaction using REST API to Export Data

The high-level interaction of API calls to retrieve a data package via the REST API is shown below.

Dynamics 365 UO Integration Design Patterns: DMF

The detailed technical implementation of Dynamics 365 UO DMF interaction using the REST API is described here
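
As a minimal sketch of that interaction, assuming the documented DMF OData actions ExportToPackage and GetExportedPackageUrl, a hypothetical export data project named "VendorExport", Newtonsoft.Json.Linq (JObject) for parsing, and an authenticated HttpClient as shown in the authentication section:

// Queue the export job; the "value" property of the response holds the execution id.
var exportRequest = new StringContent(
    "{ \"definitionGroupId\": \"VendorExport\", \"packageName\": \"VendorExport.zip\", " +
    "\"executionId\": \"\", \"reExecute\": false, \"legalEntityId\": \"USMF\" }",
    Encoding.UTF8, "application/json");
var exportResponse = await client.PostAsync(
    "/data/DataManagementDefinitionGroups/Microsoft.Dynamics.DataEntities.ExportToPackage", exportRequest);
string executionId = JObject.Parse(await exportResponse.Content.ReadAsStringAsync())["value"].ToString();

// Once the job has finished (production code should poll the execution status first),
// request the blob URL of the exported package and download the zip from it.
var urlResponse = await client.PostAsync(
    "/data/DataManagementDefinitionGroups/Microsoft.Dynamics.DataEntities.GetExportedPackageUrl",
    new StringContent("{ \"executionId\": \"" + executionId + "\" }", Encoding.UTF8, "application/json"));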

Dynamics 365 UO Recurring Integration

Recurring integration does the following things:

  • It builds on data entities and the Data management framework.
  • It enables the exchange of documents or files between Finance and Operations and any third-party application or service.
  • It supports several document formats, source mapping, Extensible Stylesheet Language Transformations (XSLT), and filters.
  • Document/file exchange in several document formats
  • It uses secure REST application programming interfaces (APIs) and authorization mechanisms to receive data from, and send data back to, integration systems.

The complete flow to import a job via recurring integration is shown below.

Dynamics 365 UO Integration Design Patterns: Recurring Integration
Recurring Integration using REST API
  1. The third-party client application authenticates to the Azure AD token issuance endpoint and requests an access token.
  2. The Azure AD token issuance endpoint issues the access token.
  3. The access token is used to authenticate to the D365FO DMF and initiate the import or export job.

The following set of APIs is used to exchange data between the Dynamics 365 F&O Recurring Integrations client and Finance and Operations.

The detailed technical implementation of recurring integration can be found here

Microsoft integration patterns can be found here

Azure Integration: Setting null value in Logic App Action to Dynamics 365

In one of my Azure integrations involving Dynamics 365, I had to send a null value when a field value was empty. The Dynamics 365 "Update a record" / "Create a record" action in the Logic App always sent an empty string ("") instead of null, which caused the integration to fail. Sending null fixed the integration, but this was not possible via the designer.

The expression below evaluates to null, but the Logic App converted it to "".

@{if(equals(items('XXX')?['Type'], null), null, replace(items('XXX')?['FieldName'], ';', ','))}

The expression for the field value resulted in the following error: "Edm Object passed should have the options selected".

{
  "status": 400,
  "message": "--batchresponse_94b2278b-f1fd-4f65-94c9-c3640fba018b\r\nContent-Type: application/http\r\nContent-Transfer-Encoding: binary\r\n\r\nHTTP/1.1 204 No Content\r\nOData-Version: 4.0\r\n\r\n\r\n--batchresponse_94b2278b-f1fd-4f65-94c9-c3640fba018b\r\nContent-Type: application/http\r\nContent-Transfer-Encoding: binary\r\n\r\nHTTP/1.1 400 Bad Request\r\nREQ_ID: 64fce8db-bc4c-4653-92f7-8d976c4da1c7\r\nContent-Type: application/json; odata.metadata=minimal\r\nOData-Version: 4.0\r\n\r\n{\"error\":{\"code\":\"0x0\",\"message\":\"Edm Object passed should have the options selected. \",\"innererror\":{\"message\":\"Edm Object passed should have the options selected. \",\"type\":\"Microsoft.Crm.CrmHttpException\",\"stacktrace\":\"   at Microsoft.Crm.Extensibility.OData.TypeConverters.OptionSetValueCollectionEdmTypeConverter.ConvertToCrmTypeInternal(String edmTypeValue, String operationName)\\r\\n 
  },
  "source": "XXXXXXX.crm4.dynamics.com",
  "errors": [],
  "debugInfo": "clientRequestId: 2e965f81-110b-4f77-964c-057f4651c702"
}

Azure Integration with Dynamics 365: Use case

The use case was to create or update an Address entity in Dynamics 365 based on specific conditions, using the standard Dynamics 365 "Update a record" / "Create a record" action. There is a specific field that accepts a specific value or null, but not an empty value.

Azure Integration: Setting null value in Logic App Action to Dynamics 365

Azure Integration with Dynamics 365: Resolution

When the Logic App designer adds curly braces {} around the expression value, the Logic App runtime converts a null result into an empty string. The trick is to go to the code view of the Logic App, find the action, and remove the curly braces {} around the expression:

@{if(equals(items('XXX')?['Type'], null), null, replace(items('XXX')?['FieldName'], ';', ','))}

to

@if(equals(items('XXX')?['Type'], null), null, replace(items('XXX')?['FieldName'], ';', ','))
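
In code view, the action parameter then looks roughly like this (the action and field names are placeholders):

"body": {
    "FieldName": "@if(equals(items('XXX')?['Type'], null), null, replace(items('XXX')?['FieldName'], ';', ','))"
}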

Dynamics 365 UO: Integration using oData/REST API

This blog describes how to interact with Dynamics 365 Unified Operations using OData. Dynamics 365 UO provides a REST API to interact with data via data entities.

oData: Open Data Protocol (OData) is a standard protocol for consuming data exposed by Dynamics 365 for Operations. OData is a Representational State Transfer (REST) based protocol for CRUD operations (Create, Read, Update, Delete) that allows integration with Dynamics 365 for Operations. It builds on common web technologies such as HTTP and JavaScript Object Notation (JSON).

Data Entity: A data entity in D365 is an abstraction from the physical implementation of database tables. A data entity is a simplified, de-normalized representation of the underlying tables. A data entity represents a common data concept or functionality (e.g. Vendors V2, where the details are stored in normalized relational tables but are represented as one flat view in the data entity).

The data flow for querying data using oData:

Dynamics 365 UO: Integration using oData/REST API

Dynamics 365 UO: oData Features

  1. CRUD operations are handled through HTTP verb support for POST, PATCH, PUT, and DELETE.
  2. D365 UO supports paging, with a maximum page size of 1,000 records.
  3. The standard OData query options are available: $filter, $select, $expand, $orderby, $top, $skip, and $count.
  4. Filter options include: Equals, Not equals, Greater than, Greater than or equal, Less than, Less than or equal, And, Or, Not.
  5. D365 FO provides an option to query data across companies (cross-company).

URI conventions for OData are described in detail here
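
A few representative query URIs combining these options (entity names and values are illustrative):

GET [baseURI]/data/CustomersV3?$select=CustomerAccount,Name&$top=10
GET [baseURI]/data/PurchaseOrderHeadersV2?$filter=OrderVendorAccountNumber eq '000001'&$orderby=AccountingDate desc
GET [baseURI]/data/VendorsV2?$count=true&cross-company=true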

Querying Data Cross-Company

By default, OData returns only data that belongs to the user's default company. To query data outside the user's default company, add the keyword ?cross-company=true to the query. This option returns data from all companies the user has access to.

Example: http://[baseURI]/data/PurchaseOrderHeadersV2?cross-company=true

To filter by a particular company that isn’t your default company, use the following syntax:

http://[baseURI]/data/PurchaseOrderHeadersV2?$filter=dataAreaId eq 'usrt'&cross-company=true

Azure Active Directory Authentication

In order to call the D365 UO OData endpoints, it is necessary to authenticate with a valid access token. The token can be retrieved from Azure Active Directory using a valid application ID and secret key that has access to the D365FO environment. The application ID and secret key are created by registering an application in Azure Active Directory.

Pre-requisite :

  1. Register an application in Azure AD and grant it access to D365FO. The detailed steps are described here. Instead of Dynamics CRM, select Dynamics ERP.
  2. Register the AAD application in D365FO:
    • System administration > Setup > Azure Active Directory applications
    • Click "New", then enter the App ID (created in the previous step), a meaningful name, and a user ID (the permissions you would like to assign).
Dynamics 365 UO: Integration using oData/REST API
  1. The client application authenticates to the Azure AD token issuance endpoint and requests an access token.
  2. The Azure AD token issuance endpoint issues the access token.
  3. The access token is used to authenticate to D365FO and invoke the OData endpoint.
  4. Data from D365FO is returned to the third-party application.
Http Method: POST
Request URL: https://login.microsoftonline.com/<TenantId>/oauth2/token
Parameters:
  • grant_type: client_credentials [Specifies the requested grant type. In a Client Credentials Grant flow, the value must be client_credentials.]
  • client_id: The application ID of the registered AAD application.
  • client_secret: A key of the registered application in AAD.
  • resource: The URL of the D365FO environment (e.g. https://dev-d365-fo-ultdeabc5b35da4fe25devaos.cloudax.dynamics.com)

The resource URL should not have a trailing "/", otherwise you will always get access denied while accessing the target resource.

C# Code

//Azure AAD application settings
//The tenant URL (use the friendly name or the tenant ID)
static string aadTenant = "https://login.windows.net/dev.onmicrosoft.com";
//The URL of the resource you will access using the access token. Ensure there is no trailing "/" at the end of the URL.
static string aadResource = "https://dev-testdevaos.sandbox.ax.dynamics.com";
//Application ID. Store it securely (encrypted config file or secret store).
static string aadClientAppId = "GUID Of the Azure application";
//Application secret. Store it securely (encrypted config file or secret store).
static string aadClientAppSecret = "Secret of the Azure application";

/// <summary>
/// Retrieves an authentication header from the service.
/// </summary>
/// <returns>The authentication header for the Web API call.</returns>
private static string GetAuthenticationHeader()
{
    //using Microsoft.IdentityModel.Clients.ActiveDirectory;
    AuthenticationContext authenticationContext = new AuthenticationContext(aadTenant);
    var credential = new ClientCredential(aadClientAppId, aadClientAppSecret);
    AuthenticationResult authenticationResult = authenticationContext.AcquireTokenAsync(aadResource, credential).Result;
    return authenticationResult.AccessToken;
}

CRUD Operations on Data Entities

The following code shows examples of creating, reading, updating, and deleting a PurchaseOrderHeader entity. More detailed information on OData can be found here

        // PurchaseOrderHeader below is a plain data contract (POCO) class whose properties match the entity fields.
        private static async Task CRUDonPurchaseOrderHeader()
        {
            string authHeader = GetAuthenticationHeader();
            HttpClient client = new HttpClient();
            client.BaseAddress = new Uri(aadResource);
            client.DefaultRequestHeaders.Clear();
            client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", authHeader);

            //Initiate the PurchaseOrderHeader object
            var payload = new PurchaseOrderHeader()
            {
                PurchaseOrderNumber = "001-000234",
                DataAreaId = "001",           
                OrderVendorAccountNumber = "000001",
                DeliveryAddressCountryRegionId = "NL",
                DeliveryAddressDescription = "Business Location",
                AccountingDate = "2019-09-19T12:00:00Z",
                PurchaseOrderName = "JDE Professional",
                RequestedDeliveryDate = "2019-09-19T12:00:00Z",
                ExpectedStoreAvailableSalesDate = "2019-09-19T12:00:00Z",
                ConfirmedDeliveryDate = "2019-09-19T12:00:00Z",
                ExpectedStoreReceiptDate = "2019-09-19T12:00:00Z",
                FixedDueDate = "2019-09-19T12:00:00Z",
                ExpectedCrossDockingDate = "2019-09-19T12:00:00Z"
            };      
            var stringPayload = JsonConvert.SerializeObject(payload);
            var httpContent = new StringContent(stringPayload, Encoding.UTF8, "application/json");
            var result = client.PostAsync("/data/PurchaseOrderHeadersV2", httpContent).Result;
            string resultContent = await result.Content.ReadAsStringAsync();
            JObject joResponse = JObject.Parse(resultContent);

            //Get a Purchase Order
             result = client.GetAsync("/data/PurchaseOrderHeadersV2?$filter=PurchaseOrderNumber eq '001-000234'").Result;
             resultContent = await result.Content.ReadAsStringAsync();
             joResponse = JObject.Parse(resultContent);

            //Update the PurchaseOrderHeader object
            payload = new PurchaseOrderHeader()
            {
                PurchaseOrderNumber = "001-000233",
                DataAreaId = "001",
                OrderVendorAccountNumber = "000001",
                DeliveryAddressCountryRegionId = "NL",
                DeliveryAddressDescription = "Business Location Address changed",
                AccountingDate = "2019-09-19T12:00:00Z",
                PurchaseOrderName = "JDE Professional",
                RequestedDeliveryDate = "2019-09-19T12:00:00Z",
                ExpectedStoreAvailableSalesDate = "2019-09-19T12:00:00Z",
                ConfirmedDeliveryDate = "2019-09-19T12:00:00Z",
                ExpectedStoreReceiptDate = "2019-09-19T12:00:00Z",
                FixedDueDate = "2019-09-19T12:00:00Z",
                ExpectedCrossDockingDate = "2019-09-19T12:00:00Z"
            };
             stringPayload = JsonConvert.SerializeObject(payload);
             httpContent = new StringContent(stringPayload, Encoding.UTF8, "application/json");
             // PatchAsync requires .NET Core 2.1 or later; on older frameworks, send an HttpRequestMessage with new HttpMethod("PATCH").
             result = client.PatchAsync("/data/PurchaseOrderHeadersV2(dataAreaId='001',PurchaseOrderNumber='001-000234')", httpContent).Result;

            //Delete the PurchaseOrderHeader object          
            result = client.DeleteAsync("/data/PurchaseOrderHeadersV2(dataAreaId='001',PurchaseOrderNumber='001-000234')").Result;

        }

Azure Integration: Tracking, Exception Handling and Monitoring of Azure Logic App

There are times when Azure integration components such as Logic Apps and Function Apps fail due to technical errors, functional errors, or data errors. There should be a mechanism to monitor and proactively inform the technical or functional owners about these errors, in order to have a successful and reliable integration. This blog provides a mechanism for the following:

  1. Monitoring solutions
  2. Handling Functional or Data related Errors
  3. Handling Technical Errors
  4. Tracking the integration flow errors

Azure Integration: Monitoring using Logic App Management solution

The Logic Apps Management monitoring solution leverages services in Azure to provide additional insight into Logic App executions. The solution collects Logic App execution log data and provides queries and views to analyze the collected data.

  1. The solution gives a more detailed view and visualization of all your logic apps with the Logic Apps Management solution on Azure Operations Management Suite (OMS), which includes custom queries, success and failure trend graphs, Power BI report generation, and a timeline of your runs, actions, triggers, and failures.
  2. The solution also lets admins resubmit failed Logic App runs.
  3. It enables the technical owner to export all the exceptions to analyze and identify the most common errors. The Azure integration owner can then come up with remediation actions and make the integration more reliable and robust.

The following diagram shows the Logic Apps Management solution with a summary of Logic App runs. It provides an option to see the runs for different periods of time, such as the last 24 hours, 48 hours, 7 days, or 30 days.

Azure Integration: Tracking, Exception Handling and Monitoring  of Azure Logic App

Clicking on the diagram takes you to a detailed view of the runs. It provides options to filter the runs by status, Logic App name, run ID, or execution time, and an option to select multiple runs and resubmit them.

Azure Integration: Tracking, Exception Handling and Monitoring  of Azure Logic App

What is Log Analytics Workspace?

A Log Analytics workspace is a service which collects data from a set of different sources. The data is organized into tables that are created automatically. From that point forward, we can monitor, analyze, visualize, and create alerts on that data.

  1. The agents send telemetry data and logs from each data source to the Log Analytics service.
  2. The Log Analytics service collects the data and classifies it into different tables.
  3. With several tools, the end user can monitor, analyze, and create reports, dashboards, and alerts based on the data.

Azure Integration : Setup Logic App Management Solution

The following diagram shows, at a high level, how to set up the Logic Apps Management solution in the Azure portal.

Azure Integration: Tracking, Exception Handling and Monitoring  of Azure Logic App
  1. Create a Log Analytics workspace to store the data.
  2. Connect the Log Analytics workspace to the Logic App's Azure subscription to collect data about the Logic App execution runs.
  3. Add the diagnostics settings in the Logic App to send logging to the Log Analytics workspace.
  4. After setting up diagnostics, connect the diagnostic setting to the workspace to send the Logic App execution details.
  5. Add the Logic Apps Management solution to the Log Analytics workspace. This collects the data from the Azure logs and visualizes the execution details.

Azure Integration: Handling Functional or Data-related Errors

There are times when the Azure integration logic fails due to incorrect data in the source or target system. These errors arise from functional or data errors, and it is ideal to forward them to functional leads so the data can be corrected in the source or the target. These errors should be fixed in either the source or the target system, and the request resubmitted to ensure successful execution of the integration logic.
The approach is to analyze the most common data errors in the current production system and identify the places where most errors occur, then implement a centralized exception-handling mechanism. The best way to get this information is by using Azure Log Analytics.
All the Logic App actions which could potentially fail due to incorrect data get an action to handle the failure. This action forwards the error messages to a central logic app for handling the exception. The exception-handling logic app logs these exceptions to the Azure activity log for monitoring and optionally sends a mail to the corresponding functional heads to correct the data.

Azure Integration: Tracking, Exception Handling and Monitoring  of Azure Logic App

An example of handling an exception while saving information to a third-party application is shown in the diagram below. The failed action calls the central exception-handling logic app and sends the required information.

Azure Integration: Tracking, Exception Handling and Monitoring  of Azure Logic App

The information which needs to be sent can be defined based on the Azure integration scenario. I sent the following information, which can then be used to log to Log Analytics and send emails to functional or technical owners:
  1. Detailed error message: retrieved using outputs('Previous Action Name')
  2. Error message body: retrieved using outputs('Previous Action Name')['body']
  3. Input message of the previous action: retrieved using body('Previous Action Name')
  4. Status code of the previous execution: outputs('Previous Action Name')['statusCode']
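
A minimal sketch of the payload posted to the central exception-handling logic app, built from the expressions above (the property names and the failed action name are my own placeholders):

{
    "logicAppName": "@{workflow()['name']}",
    "runId": "@{workflow()['run']['name']}",
    "failedAction": "Save_to_third_party",
    "statusCode": "@{outputs('Save_to_third_party')['statusCode']}",
    "errorMessage": "@{outputs('Save_to_third_party')['body']}",
    "inputMessage": "@{body('Save_to_third_party')}"
}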

The exception-handling action is configured to run only when the previous execution has failed.

The "Configure run after" option makes an action run only when the previous action has failed. The actions after the exception-handling block need to be configured to run after the previous action has succeeded or been skipped; a code-view sketch follows below.
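
In code view this corresponds to the runAfter property of each action; a sketch with hypothetical action names:

"Call_exception_handler": {
    "runAfter": {
        "Save_to_third_party": [ "Failed", "TimedOut" ]
    }
},
"Next_action": {
    "runAfter": {
        "Call_exception_handler": [ "Succeeded", "Skipped" ]
    }
}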

Azure Integration: Tracking, Exception Handling and Monitoring  of Azure Logic App

Azure Integration: Technical exception Handling

Once an Azure integration has been deployed, it is necessary to get notifications about failures or other possible problems. Logic Apps lets users set up alerts; for example, you can create an alert that detects "when more than five runs fail in an hour". This alerting capability is limited and needs to be set up per logic app. You may need to send out the actual error messages and apply some logic and formatting to the exceptions, which is not supported by the out-of-the-box alerting functionality. It can be achieved by querying the Azure Monitor logs from a Logic App and applying the required templating/processing logic to send the information. I use the following approach to get notified of failures across all Logic Apps and use a single Logic App to handle the exceptions.

Azure Integration: Tracking, Exception Handling and Monitoring  of Azure Logic App

I use the Azure Monitor Logs query action from the Logic App.

search Category == "WorkflowRuntime" | where status_s == "Failed" | where Level == "Error"

Azure Integration: Tracking, Exception Handling and Monitoring  of Azure Logic App

Tracking Azure Integration Runs

Azure Logic Apps has additional capabilities beyond the out-of-the-box run history for tracking custom values; these are called Tracked Properties. A tracked property could be a vendor account in a Dynamics 365 UO integration or an account number in a Dynamics 365 CE integration, which helps us follow the end-to-end integration flow. These additional capabilities are unlocked by enabling diagnostics for a logic app and publishing the data to Log Analytics.
In the Azure Logic App, from the Settings option within each action, there is a Tracked Properties setting where static or dynamic values can be included based upon the Logic Apps Workflow Definition Language.
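
A sketch of a tracked property on an action in the workflow definition (action and property names are placeholders; tracked properties can only reference the action's own inputs and outputs):

"Upsert_vendor": {
    "type": "Http",
    "inputs": { },
    "trackedProperties": {
        "vendorAccount": "@{action()['inputs']['body']?['VendorAccount']}"
    }
}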

Azure Integration: Tracking, Exception Handling and Monitoring  of Azure Logic App

With a Log Analytics workspace configured and diagnostics enabled within our logic app, diagnostic events are emitted to Log Analytics. If tracked properties are configured, then the next time the logic app runs, we will find our custom diagnostics included in the Log Analytics data.

Azure Integration: Tracking, Exception Handling and Monitoring  of Azure Logic App

Dynamics 365 UO: Recurring Integration to avoid DMF parallel execution issue

This blog describes a limitation of Dynamics 365 UO's Data Management Framework with parallel execution and how to work around it using the Dynamics 365 UO Recurring Integration module. The blog provides a technical implementation in .NET for queuing and dequeuing jobs using the REST API of the Recurring Integration module.

Use Case: Dynamics 365 UO Recurring Integration

Dynamics 365 UO DMF import and export fail during parallel execution of jobs

The Data Management Framework works great when a large amount of data must be imported to or exported from Dynamics 365 UO. However, the DMF REST API for import and export does not work when third-party applications queue import and export jobs in parallel; it results in unexpected exceptions from the DMF endpoints and failures in the DMF data job. Parallel import/export jobs work for different entities and fail only when the jobs target the same entity. E.g. we can import Vendors, Customers, and General Journal in parallel, but we cannot run multiple parallel imports for the same entity.

Dynamics 365 UO Integration Design Patterns: Recurring Integration
DMF Parallel execution issue

The following exceptions were discovered during the import

  • XML is not in correct format and thus 0 General Journal records are inserted in staging.
  • Cannot edit a record in Entities for a processing group (DMFDefinitionGroupEntity). The record has never been selected.
  • Cannot delete a record in Source (DMFDefinitionGroupEntityXMLFields). Deadlock, where one or more users have simultaneously locked the whole table or part of it.
  • Exception occurred while executing action ImportFromPackage on Entity DataManagementDefinitionGroup: BOX API can't be used from non-interactive sessions.
  • The record already exists.
  • Cannot create a record in Source (DMFDefinitionGroupEntityXMLFields). Entity: General Journal, ACCOUNTINGDATE. Deadlock, where one or more users have simultaneously locked the whole table or part of it.

Resolution : Dynamics 365 UO Recurring Integration

The resolution is to use the D365 UO Recurring Integration module. Recurring integration provides:

  • A queuing mechanism for data jobs (import/export)
  • Sequential execution of jobs
  • Ordered execution of jobs

Dynamics 365 UO Integration Design Patterns: Recurring Integration
Recurring Integration

Dynamics 365 UO Recurring Integration

Recurring integration does the following things:

  • It builds on data entities and the Data management framework.
  • It enables the exchange of documents or files between Finance and Operations and any third-party application or service.
  • It supports several document formats, source mapping, Extensible Stylesheet Language Transformations (XSLT), and filters.
  • Document/file exchange in several document formats
  • It uses secure REST application programming interfaces (APIs) and authorization mechanisms to receive data from, and send data back to, integration systems.

The complete flow to import a job via recurring integration is shown below.

Dynamics 365 UO Integration Design Patterns: Recurring Integration
Recurring Integration using REST API
  1. The third-party client application authenticates to the Azure AD token issuance endpoint and requests an access token.
  2. The Azure AD token issuance endpoint issues the access token.
  3. The access token is used to authenticate to the D365FO DMF and initiate the import or export job.

The following set of APIs is used to exchange data between the Dynamics 365 F&O Recurring Integrations client and Finance and Operations.

Dynamics 365 UO Recurring Integration API for Import (enqueue)

Make an HTTP POST call against the following URL.

https://<base URL>/api/connector/enqueue/<activity ID>?entity=<entity name>

In the message body, you can pass the data as a memory stream.

To get the activity ID, on the Manage scheduled data jobs page, in the ID field, copy the globally unique identifier (GUID).

Dynamics 365 UO Recurring Integration API for Export (dequeue)

To return a data package that contains all the data entities that were defined in the data project, and that the client application can unzip and consume, use the following structure.

https://<base URL>/api/connector/dequeue/<activity ID>

After the client downloads the data, an acknowledgment must be sent back to Finance and Operations to mark the data as received. In cases where no file was uploaded to the blob, the dequeue API returns a response indicating as much. A sketch of the dequeue-and-acknowledge sequence is shown below.
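
A minimal dequeue-and-acknowledge sketch, assuming the AuthManager shown later, Newtonsoft.Json.Linq (JObject) for parsing, and the documented /api/connector/ack/<activity ID> endpoint; the DownloadLocation property name follows the dequeue message contract:

// Download the next available package for the recurring job, then acknowledge receipt.
internal static async Task DequeueAndAckAsync(string baseUrl, string activityId)
{
    using (var client = new HttpClient())
    {
        client.DefaultRequestHeaders.Authorization =
            new AuthenticationHeaderValue("Bearer", AuthManager.GetAuthenticationHeader());

        // Dequeue: returns a JSON message that includes the package DownloadLocation.
        HttpResponseMessage dequeueResponse = await client.GetAsync($"{baseUrl}/api/connector/dequeue/{activityId}");
        string message = await dequeueResponse.Content.ReadAsStringAsync();
        string downloadLocation = (string)JObject.Parse(message)["DownloadLocation"];

        // Download the data package from the blob URL, then acknowledge by posting
        // the dequeue message back so Finance and Operations marks it as received.
        byte[] package = await client.GetByteArrayAsync(downloadLocation);
        await client.PostAsync($"{baseUrl}/api/connector/ack/{activityId}",
            new StringContent(message, Encoding.UTF8, "application/json"));
    }
}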

The execution ID of the DMF job is returned to the client application and can be used to monitor the progress of the job's execution.

The setup involves the following steps, and the API supports import/export of DMF data projects.

Dynamics 365 UO Integration Design Patterns: Recurring Integration
The Recurring Integration setup
Dynamics 365 UO Integration Design Patterns: Recurring Integration

Authorization for the Dynamics 365 UO Recurring Integration API

The integration REST API uses the same OAuth 2.0 authentication model as the other service endpoints. Before the integrating client application can consume this endpoint, you must create an application ID in Microsoft Azure Active Directory (Azure AD) and give it appropriate permissions to the application. When you create and enable a recurring job, you're prompted to enter the Azure AD application ID that will interact with that recurring job, so be sure to make a note of the application ID.

 internal static class AuthManager
    {      
        static string aadTenant =  "https://login.windows.net/<<TenantName>>";
        internal static string aadResource =  "https://XXXXX.cloudax.dynamics.com";
        static string aadClientAppId = "The client ID";
        static string aadClientAppSecret = "The Client Secret";

        /// <summary>
        /// Retrieves an authentication header from the service.
        /// </summary>
        /// <returns>The authentication header for the Web API call.</returns>
        internal static string GetAuthenticationHeader()
        {
            AuthenticationContext authenticationContext = new AuthenticationContext(aadTenant);
            var credential = new ClientCredential(aadClientAppId, aadClientAppSecret);
            AuthenticationResult authenticationResult = authenticationContext.AcquireTokenAsync(aadResource, credential).Result;
            return authenticationResult.AccessToken;
        }
    }

Set up a Dynamics 365 UO data project and Dynamics 365 UO recurring data jobs

Create a data project

  1. On the main dashboard, select the Data management tile to open the Data management workspace.
  2. Select the Import or Export tile to create a new data project.
  3. Enter a valid job name, data source, and entity name.
  4. Upload a data file for one or more entities. Make sure that each entity is added, and that no errors occur.
  5. Select Save.

Create a Dynamics 365 UO recurring data job

  1. On the Data project page, select Create recurring data job.
  2. Enter a valid name and a description for the recurring data job.
  3. On the Set-up authorization policy tab, enter the application ID that was generated for your application, and mark it as enabled.
  4. Expand Advanced options tab and specify either File or Data package.
  5. Select Set processing recurrence, and then, in the Define recurrence dialog box, set up a valid recurrence for your data job
  6. Select OK, and then select Yes in the confirmation message box.

Submitting data to Dynamics 365 UO recurring data jobs

You can use integration REST endpoints to integrate with the client, submit documents (import), or poll available documents for download (export). These endpoints support OAuth.

Queue the Dynamics 365 UO recurring Import Job

Dynamics 365 UO recurring data jobs API for import (enqueue)

Make an HTTP POST call against the following URL. In the message body, you can pass the data as a memory stream.

https://<base URL>/api/connector/enqueue/<activity ID>?entity=<entity name>

The following code shows how to queue an import job to recurring integration. This approach uses data-package-based import; recurring integration supports both data package import and file import. The following parameters are used:

  • The D365UO environment
  • The Legal entity Name
  • The ID of the recurring Job which was created in the previous step
  • The entity name which needs to be imported
  • Name or description for the import Job
  public static class RecurringIntegration
    {
        /// <summary>
        /// Post request
        /// </summary>
        /// <param name="uri">Enqueue endpoint URI</param>
        /// <param name="authenticationHeader">Authentication header</param>
        /// <param name="bodyStream">Body stream</param>        
        /// <param name="message">ActivityMessage context</param>
        /// <returns></returns>
        public static async Task<HttpResponseMessage> SendPostRequestAsync(Uri uri, string authenticationHeader, Stream bodyStream, string externalCorrelationHeaderValue = null)
        {
            string externalidentifier = "x-ms-dyn-externalidentifier";
            ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls |
                    SecurityProtocolType.Tls11 |
                    SecurityProtocolType.Tls12;

            using (HttpClientHandler handler = new HttpClientHandler() { UseCookies = false })
            {
                using (HttpClient httpClient = new HttpClient(handler))
                {
                    httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", authenticationHeader);

                    // Add the external correlation id header if specified and valid
                    if (!string.IsNullOrEmpty(externalCorrelationHeaderValue))
                    {
                        httpClient.DefaultRequestHeaders.Add(externalidentifier, externalCorrelationHeaderValue);
                    }

                    if (bodyStream != null)
                    {
                        using (StreamContent content = new StreamContent(bodyStream))
                        {
                            return await httpClient.PostAsync(uri, content);
                        }
                    }
                }
            }

            return new HttpResponseMessage()
            {
                Content = new StringContent("Request failed at client.", Encoding.ASCII),
                StatusCode = System.Net.HttpStatusCode.PreconditionFailed
            };
        }

        /// <summary>
        /// Get the Enqueue URI
        /// </summary>
        /// <returns>Enqueue URI</returns>
        private static Uri GetEnqueueUri(string recurringJobId, string legalEntity, string entityName)
        {
            string environmentUrl = "https://XXXXXXX.cloudax.dynamics.com";
            string enqueueUrl = "/api/connector/enqueue/";           
            //access the Connector API
            UriBuilder enqueueUri = new UriBuilder(environmentUrl);
            enqueueUri.Path = enqueueUrl + recurringJobId;
            // Data package        
            string enqueueQuery = "entity=" + entityName;
            if (!string.IsNullOrEmpty(legalEntity))
            {
                enqueueQuery += "&company=" + legalEntity;
            }
            enqueueUri.Query = enqueueQuery;        

            return enqueueUri.Uri;
        }
        public static Stream Read(string fullFilePath)
        {
            if (File.Exists(fullFilePath))
            {
                return new FileStream(fullFilePath,
                            FileMode.Open,
                            FileAccess.Read,
                            FileShare.Read,
                            0x1000,
                            true);
            }
            return null;
        }

        /// <summary>
        // Enqueue the Data package to Recurring integration
        /// </summary>
        /// <returns>Status</returns>
        internal static async Task QueueImport()
        {
            Stream stream = Read(@"C:\Temp\GL\General Journal.zip");
            string authHeader = AuthManager.GetAuthenticationHeader();
            Uri enqueueUri = GetEnqueueUri("<<ID of the recurring Job>>", "<<Legal Entity>>", "<<Entity Name>>");
            string jobName = "The name of the Job";
            HttpResponseMessage result = await SendPostRequestAsync(enqueueUri, authHeader, stream, jobName);
            string resultContent = await result.Content.ReadAsStringAsync();
            Console.WriteLine("Response is");
            Console.WriteLine(resultContent);
        }
    }

Dynamics 365 FO Integration using Business Events

This blog describes a method to use Azure Integration with Dynamics 365 FO Business Events. The Dynamics 365 FO Business Events can send events/trigger/notification to external applications such as Azure Integrations, which can use this trigger to handle specific integration or business process scenarios.

Events in Finance and Operations were previously confined to use within Finance and Operations. The new capability provides a framework that allows business processes in Finance and Operations to capture business events as business processes execute and send the events to an external system or application.

More about business events can be found here

Business events provide a perfect integration scenario when an event occurs in D365FO and this information needs to be passed on to third-party systems.

These business events can be consumed by

  • Azure Service Bus
  • Azure Logic Apps
  • Microsoft Flow
  • Azure Functions
  • HTTPS Trigger

Since these events happen in the context of business processes, they are called business events that enable business process integration.
External business processes will subscribe to specific business events from Finance and Operations to get notified when they occur. The business events can also be consumed as “triggers” in the Finance and Operations connector.

A Dynamics 365 FO Integration use case scenario

Use case: Trigger a Third party application when a Vendor Record is created

At a high level, what I am trying to achieve is shown below: a custom business event triggers a logic app, which forwards the trigger to the third-party application.

Dynamics 365 FO Integration Design Pattern: Business Events

Creating a custom Dynamics 365 FO business event

In this demo I will create a new business event from scratch to show the steps involved in creating and consuming a business event via a Logic App. As the trigger data source, I will use the vendor table (VendTable). To create a custom business event, we need the following three artifacts:

  • BusinessEventsContract class
  • BusinessEventsBase class 
  • A Trigger Class to send the business Event

More information on creating business events can be found here

BusinessEventContract Class

The business event contract class defines and populates the payload of the business event; it extends the BusinessEventsContract class. Implementing a business event contract involves extending BusinessEventContract, defining internal state, implementing an initialization method, implementing a static constructor method, and implementing parm methods to access the contract state.

/// <summary>
/// The data contract for the <c>VendorCreatedBusinessEvent</c>,business events.
/// </summary>
[DataContract]
public  class DevVendorCreatedBusinessEventContract extends BusinessEventsContract
{   
    private VendAccount vendAccount;
    /// <summary>
    /// Initializes the field values.
    /// </summary>
    private void initialize(VendTable _vendTable)
    {
        vendAccount = _vendTable.AccountNum;
      
    }
    /// <summary>
    /// Creates a <c>VendorCreatedBusinessEventContract</c> from a <c>VendTable</c> record.
    /// </summary>
    /// <param name = "_VendTable">A <c>VendTable</c> record.</param>
    /// <returns>A <c>VendorCreatedBusinessEventContract</c>.</returns>
    public static DevVendorCreatedBusinessEventContract newFromVendTable(VendTable _vendTable)
    {
        var contract =  DevVendorCreatedBusinessEventContract::construct();
        contract.initialize(_vendTable);
        contract.parmVendAccount(_vendTable.AccountNum);
        return contract;
    }

    [DataMember('AccountNumber'), BusinessEventsDataMember("@Dev:AccountNumber")]
    public VendAccount parmVendAccount(VendAccount _vendAccount = vendAccount)
    {
        vendAccount = _vendAccount;

        return vendAccount;
    }
   
    private void new()
    {
    }

    public static DevVendorCreatedBusinessEventContract construct()
    {
        DevVendorCreatedBusinessEventContract retVal = new DevVendorCreatedBusinessEventContract();
        return retVal;
    }    
}
BusinessEventsBase extension

The process of implementing an extension of the BusinessEventsBase class involves extending the BusinessEventsBase class and implementing a static constructor method, a private new method, methods to maintain internal state, and the buildContract method.

[BusinessEvents(classStr(DevVendorCreatedBusinessEventContract),
"Dev:VendorCreatedEvent","Dev:VendorCreatedEventDescription",ModuleAxapta::Vendor)]
public final class DevVendorCreatedBusinessEvent extends BusinessEventsBase
{
    private VendTable vendTable;
    private VendTable parmVendTable(VendTable _vendTable = vendTable)
    {
        vendTable = _vendTable;
        return vendTable;
    }

    private void new()
    {
        super();
    }

    public static DevVendorCreatedBusinessEvent construct()
    {
        DevVendorCreatedBusinessEvent retVal = new DevVendorCreatedBusinessEvent();
        return retVal;
    }

    [Wrappable(true), Replaceable(true)]
    public BusinessEventsContract buildContract()
    {
        return DevVendorCreatedBusinessEventContract::newFromVendTable(vendTable);
    }

    static public DevVendorCreatedBusinessEvent newFromVendTable(VendTable _vendTable)
    {
        DevVendorCreatedBusinessEvent businessEvent =  DevVendorCreatedBusinessEvent::construct();
        businessEvent.parmVendTable(_vendTable);
        return businessEvent;
    }
    
}
Sending/Triggering a Dynamics 365 FO business event

The trigger class is responsible for sending the business event. In my use case, I want to trigger the business event after the creation of a vendor record in VendTable, so I subscribe to the VendTable onInserted data event and send the business event from the handler.

public static class DevVendorCreatedBusinessEventTrigger_Extension
{
    
    /// <summary>
    ///Send the business event on vendor record creation.
    /// </summary>
    /// <param name="sender">Vendor Table</param>
    /// <param name="e"></param>
    [DataEventHandler(tableStr(VendTable), DataEventType::Inserted)]
    public static void VendTable_onInserted(Common sender, DataEventArgs e)
    {
      
        VendTable vendTable = sender;
        DevVendorCreatedBusinessEvent businessEvent = DevVendorCreatedBusinessEvent::newFromVendTable(vendTable);
        if(businessEvent)
        {
            businessEvent.send();
        }
    }
}

Activate the custom Dynamics 365 FO business event

The business event catalog doesn't get refreshed automatically. To refresh it, go to
System Administration -> Business Event Catalog -> Manage -> Rebuild business event catalog
Once the rebuild is complete, the new business event will be added to the list.

Dynamics 365 FO Integration Design Pattern: Business Events

Activate the business event and assign an endpoint to it. Once it is activated, the business event should appear in the “Active events” tab.

Consume the Dynamics 365 FO business event in Logic App

The newly created business event appears as a trigger in Logic Apps under the Dynamics 365 Fin and Ops module.

Dynamics 365 FO Integration Design Pattern: Business Events

The Logic App can then be used to get information about the vendor and forward it to third-party applications.

Dynamics 365 FO Integration Design Pattern: Business Events

    Azure Integration Release Management best practices

    Introduction

    This blog describes the best practices and guidelines for using Azure DevOps for Azure Integration Services (AIS) development and release management.

    Figure 1: High Level view of AIS Release management

    Azure DevOps / Release management

    Branches

    For releasing software to other environments, three different branches are used.

    Figure 2: Branching strategy for AIS

    Dev: The Dev branch is used by developers to check in all pending changes to Azure integration solutions.

    Main: The Main branch is used by the release manager to merge all changes from the Dev branch once those changes are tested and approved by the tester. The artifacts from this branch are always used to deploy the solution to the Test, Acceptance and Production environments.

    Release: The release branch is created by the release manager once Azure integration solutions for the current iteration are approved. The latest version of the release-branches can be used to apply hotfixes on the current release.

    Check-in policy in DevOps: every check-in (changeset) must be linked to a related Task or Issue in DevOps.

    Naming convention for Release Branch

    Release branches should be named with a combination of the business release number and the release date in YYYYMMDD format (for example, Release_2.1_20191023).

    Guidelines to Merge the Code

    The following guidelines should be followed when merging code.

    Manual Merge of code changes from Release to Dev

    Hotfix changes are merged manually. The hotfix engineer should communicate the fix to the DevLead of the project, clearly stating the actual change, the DevOps issue ID and the changeset number. The hotfix engineer should ensure that he/she receives confirmation from the DevLead that the change has been merged. The DevLead of the project should check in the change with the correct issue ID from DevOps.

    Manual Merge of code changes from Dev to Main

    Dev changes are merged to Main manually. The release manager of the project should merge the changes and link all the changesets and DevOps Tasks/Issues that are part of the merge. This helps to create a technical release document listing all the features/issues included in the release.

    Build pipelines

    Two different build pipelines are available:

    Build-Main: Manual build for Main-branch.

    Release Build: Manual build for Release-branch.

    Release pipelines

    Two different Release pipelines are available:

    Release-Main: Release pipeline for Main-branch.

    Release Hotfix: Release pipeline for Release(Hotfix)-branch.

    Resource Group management

    The following is the recommendation for resource group management for Azure integration.

    Azure Integration: ARM visualiser on Azure portal

    Azure Resource Manager (ARM) visualization is now part of the Azure portal. It provides a visual representation of ARM templates, making it easier to understand the components in a template, how they interact, and, by extension, the components deployed in a resource group.

    Azure Integration: ARM Visualizer

    1. Log in to the release candidate of the Azure portal: https://rc.portal.azure.com/
    2. Select the correct subscription.
    3. Go to the resource group you want to visualize.
    4. Click on Export template under the Settings tab.
    5. Then click on the Visualizer tab to visualize the components.
    6. The visualizer also provides an option to filter the components and to show them with or without labels.

    Azure: Enumerating the complete Logic App run history using PowerShell and the REST API

    What Are Logic Apps?

    Logic Apps are integration workflows hosted on Azure that are used to create scalable integrations between various systems. They are very easy to design and provide connectivity between disparate systems through many out-of-the-box connectors, as well as the facility to design custom connectors for specific purposes. This makes integration easier than ever, as the design aspect of previously complex integrations is reduced to the minimum steps required to get a workflow in place and running.

    Problem Scenario

    Whenever Logic Apps are hosted on the Microsoft Azure platform to integrate various business flows, it becomes imperative to monitor the Logic App runs on a daily basis, check for errors and resubmit failed executions. There will be scenarios where Azure administrators need to resubmit all the failed runs of a Logic App. The problem with the Azure PowerShell cmdlet Get-AzureRmLogicAppRunHistory is that it returns only the latest 30 items.

    The following Azure PowerShell command only returns the latest 30 runs:
    Get-AzureRmLogicAppRunHistory -ResourceGroupName $grpName -Name $logicApp.Name

    To enumerate all the Logic App runs, the REST API should be used, following its paging mechanism. The REST API can be called from PowerShell using
    Invoke-RestMethod

    REST APIs allow users to interact with various services over HTTP/HTTPS and follow a common methodology of using methods to read and manipulate information. REST APIs return information in a standard way, typically as JavaScript Object Notation (JSON). The Invoke-RestMethod cmdlet is built with REST in mind: it allows the user to invoke the various methods of web service APIs and easily parse the output.

    The Invoke-RestMethod cmdlet sends HTTP and HTTPS requests to Representational State Transfer (REST) web services that return richly structured data. PowerShell formats the response based on the data type. For JavaScript Object Notation (JSON) or XML, PowerShell converts (or deserializes) the content into objects.
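
    As a quick illustration of that deserialization (using the $uri and $headers values built later in this post), the paging and run properties of the JSON response can be read directly as object members:

    # Invoke-RestMethod deserializes the JSON response body into PowerShell objects,
    # so no manual parsing is required.
    $response = Invoke-RestMethod -Uri $uri -Headers $headers -Method Get
    Write-Host $response.value.Count # number of runs on this page
    Write-Host $response.nextLink    # URL of the next page, if any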

    Get Logic App Runs using REST

    Azure exposes a REST API to list the workflow runs of a Logic App:

    https://management.azure.com/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/workflows/{workflowName}/runs?api-version=2016-06-01
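
    The list operation also accepts an OData $filter on fields such as status, which reduces the amount of client-side filtering. A minimal sketch, assuming $subscriptionId, $resourceGroupName, $logicAppName and the $headers hashtable built in the next section:

    # Ask the service for failed runs only; the query segment is single-quoted so
    # PowerShell does not try to expand $filter as a variable.
    $uri = "https://management.azure.com/subscriptions/$subscriptionId/resourceGroups/$resourceGroupName" +
           "/providers/Microsoft.Logic/workflows/$logicAppName/runs" +
           '?api-version=2016-06-01&$filter=status eq ''Failed'''
    $response = Invoke-RestMethod -Uri $uri -Headers $headers -Method Get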

    The API endpoints are protected using OAuth 2.0 and can be accessed with a bearer (access) token. Bearer authentication (also called token authentication) is an HTTP authentication scheme that involves security tokens called bearer tokens. The client must send the access token in the Authorization header when making requests to protected resources:
    Authorization: Bearer <token>
    The access token from an interactive login can be retrieved using PowerShell as described below. The access token can also be retrieved using a registered application's credentials in Azure AD; a sketch of that follows the interactive example.

    # Sign in interactively and select the target subscription.
    Connect-AzureRmAccount
    $subscription = Get-AzureRmSubscription -SubscriptionName $subscriptionName
    $context = $subscription | Set-AzureRmContext
    # Read the most recently issued access token for the tenant from the local token cache.
    $tokens = $context.TokenCache.ReadItems() | Where-Object { $_.TenantId -eq $context.Subscription.TenantId } | Sort-Object -Property ExpiresOn -Descending
    $token = $tokens[0].AccessToken
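
    For unattended scenarios, the token can instead be requested with the OAuth 2.0 client-credentials flow. A minimal sketch, assuming an Azure AD app registration that has been granted access to the subscription ($tenantId, $clientId and $clientSecret are placeholders):

    # Request an ARM-scoped access token from Azure AD using the app's credentials.
    $body = @{
      grant_type    = 'client_credentials'
      client_id     = $clientId
      client_secret = $clientSecret
      resource      = 'https://management.azure.com/'
    }
    $tokenResponse = Invoke-RestMethod -Method Post -Uri "https://login.microsoftonline.com/$tenantId/oauth2/token" -Body $body
    $token = $tokenResponse.access_token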

    The Authorization header for Invoke-RestMethod can be passed as shown below.

    $headers = @{
      'Authorization' = 'Bearer ' + $token
    }
    Invoke-RestMethod -Method 'POST' -Uri $uri -Headers $headers

    The following code describes a method to get all the Logic App runs and resubmit the failed ones. Each response contains a nextLink property with the URL of the next page of results.

    function Get-LogicAppHistory {
      param
      (
        [Parameter(Mandatory = $true)]
        $Token,
        [Parameter(Mandatory = $true)]
        $subscriptionId,
        [Parameter(Mandatory = $true)]
        $resourceGroupName,
        [Parameter(Mandatory = $true)]
        $logicAppName,
        [Parameter(Mandatory = $false)]
        $status,
        [Parameter(Mandatory = $true)]
        $startDateTime,
        [Parameter(Mandatory = $false)]
        $endDateTime
      )
      # Default the optional end of the window to 'now' so the range check below always has a value.
      if (-not $endDateTime) { $endDateTime = Get-Date }
      $headers = @{
        'Authorization' = 'Bearer ' + $Token
      }
      # Get the first page of runs for the Logic App.
      $uri = 'https://management.azure.com/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.Logic/workflows/{2}/runs?api-version=2016-06-01' -f $subscriptionId,$resourceGroupName,$logicAppName
      $method = (Invoke-RestMethod -Uri $uri -Headers $headers -Method Get)
      $output = $method.value
      foreach ($item in $output) {
        # Resubmit the run when it matches the requested status and falls inside the date window.
        if (($item.properties.status -eq $status) -and ([DateTime]$item.properties.startTime -ge $startDateTime) -and ([DateTime]$item.properties.startTime -le $endDateTime))
        {
          $uri = 'https://management.azure.com/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.Logic/workflows/{2}/triggers/{3}/histories/{4}/resubmit?api-version=2016-06-01' -f $subscriptionId,$resourceGroupName,$logicAppName,$item.properties.Trigger.Name,$item.Name
          Write-Host "Submitting" $uri
          Invoke-RestMethod -Method 'POST' -Uri $uri -Headers $headers
        }
      }
      # Follow the nextLink property to page through the remaining runs.
      while ($method.nextLink)
      {
        $nextLink = $method.nextLink
        Write-Host $nextLink
        $method = (Invoke-RestMethod -Uri $nextLink -Headers $headers -Method Get)
        $output = $method.value
        foreach ($item in $output) {
          if (($item.properties.status -eq $status) -and ([DateTime]$item.properties.startTime -ge $startDateTime) -and ([DateTime]$item.properties.startTime -le $endDateTime))
          {
            $uri = 'https://management.azure.com/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.Logic/workflows/{2}/triggers/{3}/histories/{4}/resubmit?api-version=2016-06-01' -f $subscriptionId,$resourceGroupName,$logicAppName,$item.properties.Trigger.Name,$item.Name
            Write-Host "Submitting" $uri
            Invoke-RestMethod -Method 'POST' -Uri $uri -Headers $headers
          }
        }
      }
    }

    The complete Code

    The complete code is below.

    function Get-LogicAppHistory {
      param
      (
        [Parameter(Mandatory = $true)]
        $Token,
        [Parameter(Mandatory = $true)]
        $subscriptionId,
        [Parameter(Mandatory = $true)]
        $resourceGroupName,
        [Parameter(Mandatory = $true)]
        $logicAppName,
        [Parameter(Mandatory = $false)]
        $status,
        [Parameter(Mandatory = $true)]
        $startDateTime,
        [Parameter(Mandatory = $false)]
        $endDateTime
      )
      # Default the optional end of the window to 'now' so the range check below always has a value.
      if (-not $endDateTime) { $endDateTime = Get-Date }
      $headers = @{
        'Authorization' = 'Bearer ' + $Token
      }
      # Get the first page of runs for the Logic App.
      $uri = 'https://management.azure.com/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.Logic/workflows/{2}/runs?api-version=2016-06-01' -f $subscriptionId,$resourceGroupName,$logicAppName
      $method = (Invoke-RestMethod -Uri $uri -Headers $headers -Method Get)
      $output = $method.value
      foreach ($item in $output) {
        # Resubmit the run when it matches the requested status and falls inside the date window.
        if (($item.properties.status -eq $status) -and ([DateTime]$item.properties.startTime -ge $startDateTime) -and ([DateTime]$item.properties.startTime -le $endDateTime))
        {
          $uri = 'https://management.azure.com/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.Logic/workflows/{2}/triggers/{3}/histories/{4}/resubmit?api-version=2016-06-01' -f $subscriptionId,$resourceGroupName,$logicAppName,$item.properties.Trigger.Name,$item.Name
          Write-Host "Submitting" $uri
          Invoke-RestMethod -Method 'POST' -Uri $uri -Headers $headers
        }
      }
      # Follow the nextLink property to page through the remaining runs.
      while ($method.nextLink)
      {
        $nextLink = $method.nextLink
        Write-Host $nextLink
        $method = (Invoke-RestMethod -Uri $nextLink -Headers $headers -Method Get)
        $output = $method.value
        foreach ($item in $output) {
          if (($item.properties.status -eq $status) -and ([DateTime]$item.properties.startTime -ge $startDateTime) -and ([DateTime]$item.properties.startTime -le $endDateTime))
          {
            $uri = 'https://management.azure.com/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.Logic/workflows/{2}/triggers/{3}/histories/{4}/resubmit?api-version=2016-06-01' -f $subscriptionId,$resourceGroupName,$logicAppName,$item.properties.Trigger.Name,$item.Name
            Write-Host "Submitting" $uri
            Invoke-RestMethod -Method 'POST' -Uri $uri -Headers $headers
          }
        }
      }
    }
    function ResubmitFailedLogicApp {
      param(
        [Parameter(Mandatory = $true)]
        [string]$subscriptionName,
        [Parameter(Mandatory = $true)]
        [string]$resourceGroupName,
        [Parameter(Mandatory = $true)]
        [string]$logicAppName,
        [Parameter(Mandatory = $true)]
        [string]$status
      )
      # Ensure there is an Azure context; sign in interactively if there is none.
      $currentAzureContext = Get-AzureRmContext
      if (!$currentAzureContext)
      {
        Connect-AzureRmAccount
        $currentAzureContext = Get-AzureRmContext
      }
      # Date window for the runs to consider; hardcoded here for the example, adjust as required.
      $startDateTime = Get-Date -Date '2019-10-14'
      $endDateTime = Get-Date -Date '2019-10-23'
      $subscription = Get-AzureRmSubscription -SubscriptionName $subscriptionName
      $context = $subscription | Set-AzureRmContext
      # Read the freshest cached access token for the subscription's tenant.
      $tokens = $context.TokenCache.ReadItems() | Where-Object { $_.TenantId -eq $context.Subscription.TenantId } | Sort-Object -Property ExpiresOn -Descending
      $token = $tokens[0].AccessToken
      $subscriptionId = $subscription.Id;
      Write-Host $subscriptionId
      Get-LogicAppHistory -Token $token -SubscriptionId $subscriptionId -resourceGroupName $resourceGroupName -logicAppName $logicAppName -Status $status -startDateTime $startDateTime -endDateTime $endDateTime
    }
    Write-Host "#######  Example  #######"
    Write-Host "ResubmitFailedLogicApp -subscriptionName 'New ENT Subscription' -resourceGroupName 'resourceName' -logicAppName 'LogicAppName' -status 'Failed'"
    Write-Host "#######  Example  #######"
    # Calling the function without arguments makes PowerShell prompt for the mandatory parameters.
    ResubmitFailedLogicApp
