Wednesday, July 5, 2023

Code to read CSV files from blob to D365 using X++

My requirement is that when a vendor payment journal is posted, the file is uploaded to Azure Blob Storage, as shown in the previous post.

When a user modifies the file data in the blob, the changes should also be updated in D365.

When the journal is created and posted, no values are assigned to the DaxPaymentStatus, DaxRemarks, and DaxUTRNumber fields.

After the file is uploaded to blob storage, I manually entered values for these fields in the blob file.

Using a batch job, I need to update the corresponding journal in D365.

Code :

using Microsoft.Azure;

using Microsoft.WindowsAzure.Storage;

using Microsoft.WindowsAzure.Storage.Blob;

using Microsoft.WindowsAzure.Storage.File;

internal final class DaxSysOperationServiceClass extends SysOperationServiceBase

{

    CloudStorageAccount                                 storageAccount;

    public void blobAccess(PersonnelIntegrationStorageAccountConnectionString   azureStorageKey)

    {

        storageAccount  = Microsoft.WindowsAzure.Storage.CloudStorageAccount::Parse(azureStorageKey);

    }

    public void getFileFromBlob(str containerName,str dirPath)

    {

        System.IO.MemoryStream  memoryStream;

        CloudBlobClient         cloudBlobClient;

        CloudBlobContainer      cloudBlobContainer;

        CloudBlobDirectory      cbDir;

        container               fileNameCon, memoryStreamCon;

        if (StorageAccount)

        {

            cloudBlobClient     = StorageAccount.CreateCloudBlobClient();

            cloudBlobContainer  = cloudBlobClient.GetContainerReference(containerName);

            cbDir = cloudBlobContainer.GetDirectoryReference(dirPath);

            System.Collections.IEnumerable lstbolbEnumarable = cbDir.ListBlobs(false,0,null,null);

            System.Collections.IEnumerator lstbolbEnumarator = lstbolbEnumarable.GetEnumerator();

            while(lstbolbEnumarator.MoveNext())

            {

                IListBlobItem item = lstbolbEnumarator.Current;

                if(item is CloudBlockBlob)

                {

                    CloudBlockBlob blob = item;

                     container   filecon = str2con(blob.Name, '/');

                    str         filName = conPeek(filecon, conLen(filecon));

                    memoryStream = new System.IO.MemoryStream();

                    blob.DownloadToStream(memoryStream, null, null, null);

                     this.csvFileRead( memoryStream);

                }

            }

        }

    }

    public void csvFileRead( System.IO.Stream memoryStream)

    {

    LedgerJournalTrans      ledgerJournalTrans;

    #define.delimiterField(',');

    CommaTextStreamIo       textStreamIo;

    counter                 counter;

    container               rec;

    str                     journalNum,Voucher;

    textStreamIo = CommaTextStreamIo::constructForRead(memoryStream);

    textStreamIo.inFieldDelimiter(#delimiterField);

    textStreamIo.inRecordDelimiter('\r\n');

    counter = 0;

     rec = textStreamIo.read();

    while (!textStreamIo.status())

    {

        rec = textStreamIo.read();

        journalNum = conPeek(rec, 1);

        Voucher = conPeek(rec, 2);

        if (rec)

        {

            select forupdate ledgerJournalTrans

                where ledgerJournalTrans.journalnum == journalNum

                && ledgerJournalTrans.Voucher == Voucher;

                ttsbegin;

                ledgerJournalTrans.DaxPaymentStatus =  conPeek(rec, 7);

                ledgerJournalTrans.DaxRemarks      =  conPeek(rec, 9);

                ledgerJournalTrans.DaxUTRNumber    =   conPeek(rec, 8);

                ledgerJournalTrans.update();

                ttscommit;

        }

    }

 }

      public void process()

    {

         this.blobAccess("DefaultEndpointsProtocol=https;AccountName=lohithstorageaccount123;AccountKey=BzXOh6xHERvdl1H6eZ43qMH/qlkCP+F0uKF2bZ3fOcyniSBiWrsYnVW4j7Bu3GEfnKcE8prXZK/2+AStrnbi5w==;EndpointSuffix=core.windows.net");

        this.getFileFromBlob("lohithcontainer","newfolder");

    }

  }







No comments:

Post a Comment