Thursday, May 2, 2024

Sending an API header with encrypted credentials (encryption/decryption included)

 private static bool IsValidRequest(HttpRequest request, string configValue)

{

   bool isValidRequest = false; 

    try

    {

        const string headerKeyName = "header";
        // Read the encrypted credentials from the request header (empty when the header is missing).
        string headerValue = request.Headers[headerKeyName].FirstOrDefault() ?? string.Empty;
        string credentials = EncryptionDecryption.DecryptString(configValue, headerValue);
        isValidRequest = credentials == "User ID=nsouser;Password=nsouser123;";


    }

    catch (Exception)
    {
        // A missing header or a failed decryption leaves the request invalid.
        isValidRequest = false;
    }


    return isValidRequest;

  

}



==================



 internal static IResult GetNSOReqDataSummary(string nsoRequestID, string status, INSOReqSummaryService NSOReqSummaryService, HttpRequest request)

 {

    bool isValid = IsValidRequest(request, NSOReqSummaryService.GetConfigValue());

     if (isValid)

     {

         var responseData = "Invalid Parameter";

         if (nsoRequestID != null || status != null)

         {

             var nsoRequestDetails = NSOReqSummaryService.GetNSOReqDataSummary(nsoRequestID, status);
             // Let minimal APIs serialize the object; wrapping it in JsonConvert would double-encode the JSON.
             return nsoRequestDetails is not null ? Results.Ok(nsoRequestDetails) : Results.NotFound();

         }

         else

         {

             return Results.Json(responseData, null, null, (int)HttpStatusCode.ExpectationFailed);

         }

     }

     else

     {

          var responseInvalidUserData = "Invalid credentials";
          return Results.Json(responseInvalidUserData, null, null, (int)HttpStatusCode.ExpectationFailed);

     }

 }





=======================================================================



  public class EncryptionDecryption

  {

      public static string EncryptString(string key, string plainText)

      {

          // NOTE: a fixed all-zero IV is used, and the key string must decode to 16, 24, or 32 bytes for AES.
          byte[] iv = new byte[16];

          byte[] array;


          using (Aes aes = Aes.Create())

          {

              aes.Key = Encoding.UTF8.GetBytes(key);

              aes.IV = iv;


              ICryptoTransform encryptor = aes.CreateEncryptor(aes.Key, aes.IV);


              using (MemoryStream memoryStream = new MemoryStream())

              {

                  using (CryptoStream cryptoStream = new CryptoStream(memoryStream, encryptor, CryptoStreamMode.Write))

                  {

                      using (StreamWriter streamWriter = new StreamWriter(cryptoStream))

                      {

                          streamWriter.Write(plainText);

                      }


                      array = memoryStream.ToArray();

                  }

              }

          }


          return Convert.ToBase64String(array);

      }


      public static string DecryptString(string key, string cipherText)

      {

          // The same fixed all-zero IV must be used for decryption.
          byte[] iv = new byte[16];

          byte[] buffer = Convert.FromBase64String(cipherText);


          using (Aes aes = Aes.Create())

          {

              aes.Key = Encoding.UTF8.GetBytes(key);

              aes.IV = iv;

              ICryptoTransform decryptor = aes.CreateDecryptor(aes.Key, aes.IV);


              using (MemoryStream memoryStream = new MemoryStream(buffer))

              {

                  using (CryptoStream cryptoStream = new CryptoStream(memoryStream, decryptor, CryptoStreamMode.Read))

                  {

                      using (StreamReader streamReader = new StreamReader(cryptoStream))

                      {

                          return streamReader.ReadToEnd();

                      }

                  }

              }

          }

      }

  }
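For reference, a minimal client-side sketch of producing the header this endpoint expects. Only EncryptionDecryption.EncryptString and the "header" header name come from the code above; the endpoint URL, query string, and key value are placeholders.

using System;
using System.Net.Http;
using System.Threading.Tasks;

public static class NsoClientSample
{
    public static async Task CallSummaryAsync()
    {
        // Must match the key the API reads via NSOReqSummaryService.GetConfigValue() (placeholder value).
        string sharedKey = "0123456789ABCDEF";
        string encrypted = EncryptionDecryption.EncryptString(sharedKey, "User ID=nsouser;Password=nsouser123;");

        using var client = new HttpClient();
        // Hypothetical route; substitute the real endpoint and query parameters.
        var request = new HttpRequestMessage(HttpMethod.Get,
            "https://localhost:5001/api/nso/summary?nsoRequestID=NSO123&status=OPEN");
        request.Headers.Add("header", encrypted);

        HttpResponseMessage response = await client.SendAsync(request);
        Console.WriteLine($"{(int)response.StatusCode}: {await response.Content.ReadAsStringAsync()}");
    }
}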

Tuesday, January 16, 2024

Add a file to a SharePoint document library from an API

public async Task<IResult> UploadFile(HttpRequest request)

{

    CSOMHelper helper = null;

    ClientContext clientContext = null;

    string _libraryName = string.Empty;

    string fileName = string.Empty;

    bool result = false;

    string SPItemId = string.Empty;

    try

    {

        helper = new CSOMHelper();

        clientContext = await helper.GetAppOnlyContext(_config.GetValue<string>("SharePointSiteUrl"), _config.GetValue<string>("clientId"), _config.GetValue<string>("clientSecret"));

        byte[] binaryData;

        var context = request; //_httpContext.HttpContext.Request;

        string returnString = String.Empty;

        string opportunityNumber = string.Empty;//Request.Form.Files[0].FileName.Split('#')[1];

        string attachmentConfigId = string.Empty;//Request.Form.Files[0].FileName.Split('#')[2];


        if (context.Headers.ContainsKey("foldername"))

        {

            _libraryName = context.Headers["foldername"];

        }


        if (context.Form.Files.Count > 0)

        {

            using Stream stream = context.Form.Files[0].OpenReadStream();
            using BinaryReader br = new BinaryReader(stream);
            binaryData = br.ReadBytes((int)context.Form.Files[0].Length);

            //string timeStamp = helper.GetTimestamp(DateTime.Now);

            string timeStamp = DateTime.Now.ToShortDateString();

            Guid uploadId = Guid.NewGuid();

            fileName = uploadId + "_" + context.Form.Files[0].FileName;

            FileEntity fileEntity = new FileEntity()

            {

                FileName = fileName,

                FileStream = binaryData

            };


            string year = DateTime.Now.ToString("yyyy");


            List<string> lstOfFolders = new List<string>();

            lstOfFolders.Add(year);

            Folder folder = await helper.AddNestedFolders(clientContext, _libraryName, lstOfFolders, true);


            ListItem listItemToAdd = await helper.UploadFileBySlices(clientContext, fileEntity, _libraryName, null, 100, folder);


            SPItemId = Convert.ToString(listItemToAdd["ID"]);


            return Results.Ok(new { id = SPItemId, filename = fileName });

        }

    }

    catch (Exception ex)

    {

        _logger.LogEventError(new LogMessage { Message = ex.Message, StackTrace = ex.StackTrace });

        return Results.BadRequest(ex.Message);

    }

    finally

    {

        helper?.DisposeContext(clientContext);

    }

    // Fallback when no file was present in the form data.
    return Results.Ok(new { id = SPItemId, filename = fileName });

}
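A minimal client-side sketch for calling this upload endpoint. The route is hypothetical; the "foldername" header and the multipart file come from the handler above.

using System;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;

public static class UploadClientSample
{
    public static async Task UploadAsync(string filePath)
    {
        using var client = new HttpClient();
        using var form = new MultipartFormDataContent();
        using var fileStream = File.OpenRead(filePath);

        // The handler reads the first file from the form and the target library from the "foldername" header.
        form.Add(new StreamContent(fileStream), "file", Path.GetFileName(filePath));

        // Hypothetical route; substitute the real endpoint.
        var request = new HttpRequestMessage(HttpMethod.Post, "https://localhost:5001/api/files/upload")
        {
            Content = form
        };
        request.Headers.Add("foldername", "Shared Documents");

        HttpResponseMessage response = await client.SendAsync(request);
        Console.WriteLine(await response.Content.ReadAsStringAsync()); // e.g. { "id": "42", "filename": "..." }
    }
}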

 



public async Task<Folder> AddNestedFolders(ClientContext clientContext, string strLibrary, List<string> lstFolders, bool isExecute = true)

{

    try

    {

        List oList = clientContext.Web.Lists.GetByTitle(strLibrary);

        var folder = oList.RootFolder;

        clientContext.Load(folder);

        await clientContext.ExecuteQueryAsync();

        foreach (string item in lstFolders)

        {

            folder = folder.Folders.Add(item);

        }

        clientContext.Load(folder);


        if (isExecute)

            await clientContext.ExecuteQueryAsync();

        return folder;

    }

    catch (Exception)
    {
        // Rethrow without resetting the stack trace.
        throw;

    }

}


/// <summary>

/// Upload large files to a library in slices (2 MB per slice by default)

/// </summary>

/// <param name="clientContext"></param>

/// <param name="fileEntity"></param>

/// <param name="libraryName"></param>

/// <param name="filePropertiesToUpdate"></param>

/// <param name="FirstTimeUploadSize"></param>

/// <param name="foldertoUpload"></param>

/// <returns></returns>

public async Task<ListItem> UploadFileBySlices(ClientContext clientContext, FileEntity fileEntity, string libraryName, List<ListDataEntitiy> filePropertiesToUpdate, int FirstTimeUploadSize = 2, Folder foldertoUpload = null)

{

    ListItem oFileItem = null;

    try

    {


        var dataStream = fileEntity.FileStream;

        var fileName = fileEntity.FileName;

        var oWeb = clientContext.Web;


        clientContext.RequestTimeout = Timeout.Infinite;


        // Each sliced upload requires a unique ID.

        Guid uploadId = Guid.NewGuid();


        var dateTime = DateTime.Now.ToString("yyyy_MM_dd_HH_mm_ss_fff");

        // Get the name of the file.

        string uniqueFileName = fileName; // the caller already prefixes a unique GUID to the file name

        List docLibrary = clientContext.Web.Lists.GetByTitle(libraryName);

        if (foldertoUpload == null)

            foldertoUpload = docLibrary.RootFolder;


        // File object.

        Microsoft.SharePoint.Client.File uploadFile = null;


        // Calculate block size in bytes.

        decimal blockSizeFirst = FirstTimeUploadSize * 1024 * 1024;

        int blockSize = FirstTimeUploadSize * 1024 * 1024;



        // Get the size of the file.

        long fileSize = dataStream.Length;//new FileInfo(fileName).Length;

        if (fileSize <= blockSize)

        {

            int tempblockSize = Convert.ToInt32(fileSize / 2);

            blockSize = tempblockSize + 1;

        }



        if (fileSize <= blockSizeFirst)

        {

            using (Stream stream = new MemoryStream(dataStream))

            {

                FileCreationInformation fileInfo = new FileCreationInformation();

                fileInfo.ContentStream = stream;

                fileInfo.Url = uniqueFileName;

                fileInfo.Overwrite = true;

                uploadFile = foldertoUpload.Files.Add(fileInfo);

                clientContext.Load(uploadFile);

                oFileItem = uploadFile.ListItemAllFields;

                clientContext.Load(oFileItem);

                clientContext.ExecuteQuery();

                //if (filePropertiesToUpdate != null)

                //{

                //    this.AssignValuesToListItem(filePropertiesToUpdate, oWeb, oFileItem);

                //    oFileItem = await this.SaveListItem(clientContext, oFileItem, true);

                //}

            }

        }

        else

        {

            // Use large file upload approach.

            ClientResult<long> bytesUploaded = null;


            //FileStream fs = null;

            Stream stream = new MemoryStream(dataStream);

            try

            {

                using (BinaryReader br = new BinaryReader(stream))

                {

                    byte[] buffer = new byte[blockSize];

                    Byte[] lastBuffer = null;

                    long fileoffset = 0;

                    long totalBytesRead = 0;

                    int bytesRead;

                    bool first = true;

                    bool last = false;


                    // Read data from file system in blocks.

                    while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)

                    {

                        totalBytesRead = totalBytesRead + bytesRead;


                        // You've reached the end of the file.

                        if (totalBytesRead == fileSize)

                        {

                            last = true;

                            //   first = false;

                            // Copy to a new buffer that has the correct size.

                            lastBuffer = new byte[bytesRead];

                            Array.Copy(buffer, 0, lastBuffer, 0, bytesRead);

                        }


                        if (first)

                        {

                            using (MemoryStream contentStream = new MemoryStream())

                            {

                                // Add an empty file.

                                FileCreationInformation fileInfo = new FileCreationInformation();

                                fileInfo.ContentStream = contentStream;

                                fileInfo.Url = uniqueFileName;

                                fileInfo.Overwrite = true;

                                uploadFile = foldertoUpload.Files.Add(fileInfo);

                                // Start upload by uploading the first slice.

                                using (MemoryStream s = new MemoryStream(buffer))

                                {

                                    // Call the start upload method on the first slice.

                                    bytesUploaded = uploadFile.StartUpload(uploadId, s);



                                    oFileItem = uploadFile.ListItemAllFields;
                                    clientContext.Load(oFileItem);
                                    // Execute the query so bytesUploaded.Value is populated before it is read below.
                                    clientContext.ExecuteQuery();

                                    if (filePropertiesToUpdate != null)

                                    {

                                        this.AssignValuesToListItem(filePropertiesToUpdate, oWeb, oFileItem);

                                        oFileItem = await this.SaveListItem(clientContext, oFileItem, true);

                                    }


                                    // fileoffset is the pointer where the next slice will be added.

                                    fileoffset = bytesUploaded.Value;

                                }


                                // You can only start the upload once.

                                first = false;

                            }

                        }

                        else

                        {

                            if (last)

                            {

                                // Is this the last slice of data?

                                using (MemoryStream s = new MemoryStream(lastBuffer))

                                {

                                    // End sliced upload by calling FinishUpload.

                                    uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, s);

                                    clientContext.ExecuteQuery();

                                }

                            }

                            else

                            {

                                //  clsLogger.Log("SensorsController", "UploadFile", "middle: " + buffer.Length.ToString());


                                using (MemoryStream s = new MemoryStream(buffer))

                                {

                                    // Continue sliced upload.

                                    bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, s);

                                    clientContext.ExecuteQuery();

                                    // Update fileoffset for the next slice.

                                    fileoffset = bytesUploaded.Value;

                                }

                            }

                        }

                    } // while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)

                }

            }

            catch (Exception)
            {
                // Rethrow without resetting the stack trace.
                throw;

            }

            finally

            {

                if (stream != null)

                {

                    stream.Dispose();

                }

            }

        }

    }

    catch (Exception)
    {
        // Rethrow without resetting the stack trace.
        throw;

    }

    return oFileItem;

}
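FileEntity and ListDataEntitiy are referenced above but not shown; here is a minimal sketch of the shapes UploadFileBySlices assumes (the real classes may carry more members).

// Assumed shapes only - the actual entity classes are not included in this post.
public class FileEntity
{
    public string FileName { get; set; }    // name the file is stored under in the library
    public byte[] FileStream { get; set; }  // raw file bytes read from the incoming request
}

public class ListDataEntitiy
{
    public string FieldName { get; set; }   // internal name of the list field to update
    public object FieldValue { get; set; }  // value applied by AssignValuesToListItem
}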

Wednesday, August 23, 2023

Stored procedure example for a JSON object (using OPENJSON)

 USE [AGSBIM_APPS_DEV]

GO


SET ANSI_NULLS ON

GO

SET QUOTED_IDENTIFIER ON

GO

ALTER PROCEDURE [AGS_BIM_GLL].[USP_SAVE_USER_DELEGATION](

@employeeId NVARCHAR(10),

@action NVARCHAR(10),

@requestObj NVARCHAR(MAX)

)

AS

BEGIN

DECLARE @id INT, @targetEmployeeId NVARCHAR(10), @startDate NVARCHAR(50), @endDate NVARCHAR(50), @reason NVARCHAR(MAX);


SELECT @id = id, @targetEmployeeId = targetEmployeeId, @startDate = startDate, @endDate = endDate, @reason = reason 

FROM OPENJSON(@requestObj) WITH

(  

        id INT '$.id',

targetEmployeeId NVARCHAR(10) '$.targetEmployeeId',

        startDate NVARCHAR(50) '$.startDate',

        endDate NVARCHAR(50) '$.endDate',

        reason NVARCHAR(MAX) '$.reason'

    )

    DECLARE @count INT;

   

IF (@action = 'NEW')

BEGIN

SELECT @count = COUNT(*) FROM [AGS_BIM_GLL].[user_delegation] WHERE source_employee_id = @employeeId AND  active = 1

AND (@startDate <= end_date AND @endDate >= start_date) -- overlap check, including a new range that fully contains an existing one

IF(@count > 0)

BEGIN

RAISERROR('Delegation was done for this time period already', 16, 1)

RETURN

END

INSERT INTO [AGS_BIM_GLL].[user_delegation](source_employee_id, target_employee_id, start_date, end_date, active, created_at, updated_at)

SELECT @employeeId, @targetEmployeeId, @startDate, @endDate, 1, GETUTCDATE(), GETUTCDATE()

END

ELSE IF (@action = 'UPDATE')

BEGIN

SELECT @count = COUNT(*) FROM [AGS_BIM_GLL].[user_delegation] WHERE source_employee_id = @employeeId AND  active = 1

AND (@startDate <= end_date AND @endDate >= start_date) -- overlap check, including a new range that fully contains an existing one

AND id != @id


IF(@count > 0)

BEGIN

RAISERROR('Delegation was done for this time period already', 16, 1)

RETURN

END

UPDATE [AGS_BIM_GLL].[user_delegation] SET target_employee_id = @targetEmployeeId, start_date = @startDate, end_date = @endDate, updated_at = GETUTCDATE()

WHERE id = @id

END

ELSE IF (@action = 'CANCEL')

BEGIN

UPDATE [AGS_BIM_GLL].[user_delegation] SET reason = @reason, active = 0, updated_at = GETUTCDATE()

WHERE id = @id

END

END
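A short sketch of calling this procedure from C# with a JSON payload. The connection string and values are placeholders; the parameter names and JSON property names come from the procedure above.

using System.Data;
using Microsoft.Data.SqlClient;

string requestObj = @"{
    ""id"": 0,
    ""targetEmployeeId"": ""E0012345"",
    ""startDate"": ""2024-05-06"",
    ""endDate"": ""2024-05-10"",
    ""reason"": """"
}";

using var connection = new SqlConnection("<connection-string>");
using var command = new SqlCommand("[AGS_BIM_GLL].[USP_SAVE_USER_DELEGATION]", connection)
{
    CommandType = CommandType.StoredProcedure
};
command.Parameters.Add("@employeeId", SqlDbType.NVarChar, 10).Value = "E0098765";
command.Parameters.Add("@action", SqlDbType.NVarChar, 10).Value = "NEW";
command.Parameters.Add("@requestObj", SqlDbType.NVarChar, -1).Value = requestObj;  // -1 = NVARCHAR(MAX)

connection.Open();
command.ExecuteNonQuery();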

Monday, May 15, 2023

Method for managing SharePoint Online throttling for HTTP status codes 429 and 503 ("Response status code does not indicate success: 429 ()").

public static void ExecuteQueryWithIncrementalRetry(this ClientContext clientContext, int retryCount, int delay)
{
    int retryAttempts = 0;
    int backoffInterval = delay;
    int retryAfterInterval = 0;
    bool retry = false;
    ClientRequestWrapper wrapper = null;

    if (retryCount <= 0)
        throw new ArgumentException("Provide a retry count greater than zero.");
    if (delay <= 0)
        throw new ArgumentException("Provide a delay greater than zero.");

    // Retry while the attempt count is less than the configured retry count.
    while (retryAttempts < retryCount)
    {
        try
        {
            if (!retry)
            {
                clientContext.ExecuteQuery();
                return;
            }
            else
            {
                // Increment the retry count.
                retryAttempts++;

                // Retry the previous request using the wrapper when available.
                if (wrapper != null && wrapper.Value != null)
                {
                    clientContext.RetryQuery(wrapper.Value);
                    return;
                }
                // Otherwise retry the previous request as normal.
                else
                {
                    clientContext.ExecuteQuery();
                    return;
                }
            }
        }
        catch (WebException ex)
        {
            var response = ex.Response as HttpWebResponse;
            // Check if the request was throttled (HTTP 429) or failed because the server was unavailable (HTTP 503).
            if (response != null && (response.StatusCode == (HttpStatusCode)429 || response.StatusCode == (HttpStatusCode)503))
            {
                wrapper = (ClientRequestWrapper)ex.Data["ClientRequest"];
                retry = true;

                // Determine the retry-after value - use the `Retry-After` header when available.
                string retryAfterHeader = response.GetResponseHeader("Retry-After");
                if (!string.IsNullOrEmpty(retryAfterHeader))
                {
                    if (!Int32.TryParse(retryAfterHeader, out retryAfterInterval))
                    {
                        retryAfterInterval = backoffInterval;
                    }
                }
                else
                {
                    retryAfterInterval = backoffInterval;
                }

                // Delay for the requested number of seconds.
                Thread.Sleep(retryAfterInterval * 1000);

                // Double the back-off interval for the next attempt.
                backoffInterval = backoffInterval * 2;
            }
            else
            {
                throw;
            }
        }
    }

    throw new MaximumRetryAttemptedException($"Maximum retry attempts {retryCount} have been attempted.");
}

[Serializable]
public class MaximumRetryAttemptedException : Exception
{
    public MaximumRetryAttemptedException(string message) : base(message) { }
}
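Example usage, replacing a direct ExecuteQuery call with the retry-aware extension method (the site URL is a placeholder):

using (ClientContext clientContext = new ClientContext("https://contoso.sharepoint.com/sites/demo"))
{
    Web web = clientContext.Web;
    clientContext.Load(web);

    // Retry up to 5 times, starting from a 30-second back-off that doubles after each throttled response.
    clientContext.ExecuteQueryWithIncrementalRetry(5, 30);
}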

Tuesday, March 28, 2023

Insert stored procedure driven by a JSON object

 USE [AGSBIM_APPS_DEV]

GO

/****** Object:  StoredProcedure [AGS_BIM_GLL].[USP_SAVE_LOANOUT_REQUEST]    Script Date: 3/28/2023 3:43:51 PM ******/

SET ANSI_NULLS ON

GO

SET QUOTED_IDENTIFIER ON

GO

ALTER PROCEDURE [AGS_BIM_GLL].[USP_SAVE_LOANOUT_REQUEST](

@requestedEmployeeId NVARCHAR(10),

@team NVARCHAR(50),

@requestObj VARCHAR(MAX)

)

AS

BEGIN

BEGIN TRANSACTION  

BEGIN TRY

DECLARE @requestType NVARCHAR(10) = 'LOAN_OUT';

DECLARE @count NVARCHAR(10);  

SET @count = (SELECT COUNT(*) FROM [AGS_BIM_GLL].[request]) + 1

DECLARE @requestId NVARCHAR(10);  

SET @requestId = CONCAT('REQ', REPLICATE('0',7-LEN(@count)) + @count);

-- status id 1: pending for approval, status id 2: approved

DECLARE @defaultStatusId INT;

SET @defaultStatusId = CASE 

WHEN ((SELECT COUNT(*) FROM [AGS_BIM_GLL].[team_configuration] WHERE team = @team AND request_type =  @requestType AND auto_approve = 1)) > 0 THEN 2

ELSE 1

END

INSERT INTO [AGS_BIM_GLL].[request] (request_id, request_type, status_id, created_by, created_at, updated_at ) VALUES

(@requestId, @requestType, @defaultStatusId, @requestedEmployeeId, GETUTCDATE(), GETUTCDATE());

INSERT INTO [AGS_BIM_GLL].[available_resource_pool] (request_id, team, employee_id, justification, start_date, end_date, status_id , created_at, updated_at)

SELECT @requestId, team, employee_id, justification, start_date, end_date, @defaultStatusId, GETUTCDATE(), GETUTCDATE() FROM OPENJSON(@requestObj) WITH

(

team NVARCHAR(50) '$.team', 

employee_id NVARCHAR(10) '$.employeeId', 

            justification NVARCHAR(MAX) '$.justification',

            start_date DATE '$.startDate', 

            end_date DATE '$.endDate'
)

DECLARE @listOfEmployeeIds table (employeeId NVARCHAR(10));

INSERT INTO @listOfEmployeeIds

SELECT employee_id FROM OPENJSON(@requestObj) WITH

(

employee_id NVARCHAR(10) '$.employeeId'

)

-- Skills

DELETE FROM [AGS_BIM_GLL].[employee_skills_mapping] WHERE employee_id IN (SELECT employeeId FROM @listOfEmployeeIds)

INSERT INTO [AGS_BIM_GLL].[employee_skills_mapping] (employee_id, skill_name, created_at, updated_at)

SELECT employee_id, skill_name, GETUTCDATE(), GETUTCDATE() FROM OPENJSON(@requestObj) WITH

(

employee_id NVARCHAR(10) '$.employeeId', 

        skills NVARCHAR(MAX) '$.skills' as JSON

)  

CROSS APPLY OPENJSON (skills) WITH

(

id INT '$.id',  

skill_name NVARCHAR(255) '$.name'

)

-- Languages

DELETE FROM [AGS_BIM_GLL].[employee_languages_mapping] WHERE employee_id IN (SELECT employeeId FROM @listOfEmployeeIds)

INSERT INTO [AGS_BIM_GLL].[employee_languages_mapping] (employee_id, language_name, created_at, updated_at)

SELECT employee_id, language_name, GETUTCDATE(), GETUTCDATE() FROM OPENJSON(@requestObj) WITH

(

employee_id NVARCHAR(10) '$.employeeId', 

        languages NVARCHAR(MAX) '$.languages' as JSON

)  

CROSS APPLY OPENJSON (languages) WITH

(

id INT '$.id',  

language_name NVARCHAR(255) '$.name'

)

-- Certifications

DELETE FROM [AGS_BIM_GLL].[employee_certifications_mapping] WHERE employee_id IN (SELECT employeeId FROM @listOfEmployeeIds)

INSERT INTO [AGS_BIM_GLL].[employee_certifications_mapping] (employee_id, certification_name, created_at, updated_at)

SELECT employee_id, certification_name, GETUTCDATE(), GETUTCDATE() FROM OPENJSON(@requestObj) WITH

(

employee_id NVARCHAR(10) '$.employeeId', 

        certifications NVARCHAR(MAX) '$.certifications' as JSON

)  

CROSS APPLY OPENJSON (certifications) WITH

(

id INT '$.id',  

certification_name NVARCHAR(255) '$.name'

)

-- Passport Details

DELETE FROM [AGS_BIM_GLL].[employee_passport_details] WHERE employee_id IN (SELECT employeeId FROM @listOfEmployeeIds)

INSERT INTO [AGS_BIM_GLL].[employee_passport_details] (employee_id, issued_by, valid_from, valid_to, created_at, updated_at)

SELECT employee_id, issued_by, valid_from, valid_to, GETUTCDATE(), GETUTCDATE() FROM OPENJSON(@requestObj) WITH

(

employee_id NVARCHAR(10) '$.employeeId', 

        passportDetails NVARCHAR(MAX) '$.passportDetails' as JSON

)  

CROSS APPLY OPENJSON (passportDetails) WITH

(

id INT '$.id',  

issued_by INT '$.issued_by',

valid_from DATE '$.valid_from',

valid_to DATE '$.valid_to'

)

-- Visa Details

DELETE FROM [AGS_BIM_GLL].[employee_visa_details] WHERE employee_id IN (SELECT employeeId FROM @listOfEmployeeIds)

INSERT INTO [AGS_BIM_GLL].[employee_visa_details] (employee_id, country_id, visa_type_id, valid_from, valid_to, created_at, updated_at)

SELECT employee_id, country_id, visa_type_id, valid_from, valid_to, GETUTCDATE(), GETUTCDATE() FROM OPENJSON(@requestObj) WITH

(

employee_id NVARCHAR(10) '$.employeeId', 

        visaDetails NVARCHAR(MAX) '$.visaDetails' as JSON

)  

CROSS APPLY OPENJSON (visaDetails) WITH

(

id INT '$.id',  

country_id INT '$.country_id',

visa_type_id INT '$.visa_type_id',

valid_from DATE '$.valid_from',

valid_to DATE '$.valid_to'

)

COMMIT TRANSACTION

END TRY

BEGIN CATCH

  ROLLBACK TRANSACTION;
  THROW;

END CATCH

END
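For context, this is the JSON shape the OPENJSON WITH clauses above expect in @requestObj; the property names come from the procedure, the values are made up.

// Illustrative payload for @requestObj (placeholder values only).
string requestObj = @"[
  {
    ""team"": ""GLL"",
    ""employeeId"": ""E0012345"",
    ""justification"": ""Available for loan-out next quarter"",
    ""startDate"": ""2023-04-01"",
    ""endDate"": ""2023-06-30"",
    ""skills"": [ { ""id"": 1, ""name"": ""SharePoint"" } ],
    ""languages"": [ { ""id"": 1, ""name"": ""English"" } ],
    ""certifications"": [ { ""id"": 1, ""name"": ""PL-900"" } ],
    ""passportDetails"": [ { ""id"": 1, ""issued_by"": 1, ""valid_from"": ""2020-01-01"", ""valid_to"": ""2030-01-01"" } ],
    ""visaDetails"": [ { ""id"": 1, ""country_id"": 1, ""visa_type_id"": 2, ""valid_from"": ""2023-01-01"", ""valid_to"": ""2024-01-01"" } ]
  }
]";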

Sunday, December 25, 2022

Get user ID and details from AD (Active Directory)

  public static List<UserInformation> GetADUsers(string username)

        {


            List<UserInformation> lstADUsers = new List<UserInformation>();


            if (!string.IsNullOrEmpty(username))

            {

                using (PrincipalContext context = new PrincipalContext(ContextType.Domain))

                {

                    UserPrincipal user = new UserPrincipal(context);

                    user.DisplayName = username + "*";

                    using (PrincipalSearcher srch = new PrincipalSearcher(user))

                    {

                        int i = 0;

                        foreach (var result in srch.FindAll())

                        {

                            DirectoryEntry de = result.GetUnderlyingObject() as DirectoryEntry;

                            if (!String.IsNullOrEmpty((String)de.Properties["displayName"].Value))

                            {

                                i++;

                                UserInformation userInfo = new UserInformation();

                                userInfo.UserName = de.Properties["displayName"].Value.ToString();

                                if (de.Properties["mail"].Value != null)

                                {

                                    userInfo.UserEmail = de.Properties["mail"].Value.ToString();

                                    userInfo.UserEmpID = de.Properties["employeeID"].Value?.ToString();

                                    if (de.Properties["manager"].Value != null)

                                    {

                                        string managerValue = de.Properties["manager"].Value.ToString();


                                        using (DirectoryEntry deManager = new DirectoryEntry(Constants.ldap_AD_Path + managerValue))
                                        {
                                            userInfo.UserManager = (string)deManager.Properties["displayName"].Value;
                                            userInfo.UserManagerID = (string)deManager.Properties["employeeID"].Value;
                                        }
                                        userInfo.UserCompanyRegion = (string)de.Properties["company"].Value;


                                    }

                                }


                                lstADUsers.Add(userInfo);

                                // Limit the result set to the first 10 matching users.
                                if (i == 10) break;


                            }

                        }

                    }

                }

            }

            return lstADUsers;

        }
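Example usage; UserInformation is the DTO populated above (its full definition isn't shown here).

// Look up the first matching directory entries for a partial display name.
List<UserInformation> users = GetADUsers("John Sm");
foreach (UserInformation user in users)
{
    Console.WriteLine($"{user.UserName} ({user.UserEmpID}) - manager: {user.UserManager}");
}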

Wednesday, November 30, 2022