From e7060256125643d6705cfff4c05192716c993ab1 Mon Sep 17 00:00:00 2001 From: fzbm <52308785+fzbm@users.noreply.github.com> Date: Mon, 15 Jan 2024 14:25:44 +0100 Subject: [PATCH 1/8] Use cached "PropertyInfo" in "PendingRequestCount" The extension method "PendingRequestCount" now uses a cached "PropertyInfo" to access the "Actions" property of a "ClientContext". Additional changes: * Changed the code of "PendingRequestCount" to reduce nesting. * Removed unnecessary initializations. * Removed unnecessary references to the class when accessing class-level members. * Fixed naming of static members. --- .../Extensions/ClientContextExtensions.cs | 89 +++++++++++-------- 1 file changed, 51 insertions(+), 38 deletions(-) diff --git a/src/lib/PnP.Framework/Extensions/ClientContextExtensions.cs b/src/lib/PnP.Framework/Extensions/ClientContextExtensions.cs index ed2a873d3..8e146810d 100644 --- a/src/lib/PnP.Framework/Extensions/ClientContextExtensions.cs +++ b/src/lib/PnP.Framework/Extensions/ClientContextExtensions.cs @@ -1,18 +1,9 @@ -using PnP.Framework; -using PnP.Framework.Diagnostics; -using PnP.Framework.Http; -using PnP.Framework.Provisioning.ObjectHandlers; -using PnP.Framework.Sites; -using PnP.Framework.Utilities; -using PnP.Framework.Utilities.Async; -using PnP.Framework.Utilities.Context; using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Configuration; using System.Diagnostics; using System.IdentityModel.Tokens.Jwt; -using System.IO; using System.Linq; using System.Net; using System.Net.Http; @@ -21,6 +12,14 @@ using System.Text.RegularExpressions; using System.Threading.Tasks; using System.Xml; +using PnP.Framework; +using PnP.Framework.Diagnostics; +using PnP.Framework.Http; +using PnP.Framework.Provisioning.ObjectHandlers; +using PnP.Framework.Sites; +using PnP.Framework.Utilities; +using PnP.Framework.Utilities.Async; +using PnP.Framework.Utilities.Context; namespace Microsoft.SharePoint.Client { @@ -29,10 
+28,11 @@ namespace Microsoft.SharePoint.Client /// public static partial class ClientContextExtensions { - private static readonly string userAgentFromConfig = null; + private static readonly string UserAgentFromConfig; + private static readonly Lazy PendingRequestActionsProperty = new Lazy(GetPendingRequestActionsProperty); #pragma warning disable CS0169 - private static ConcurrentDictionary requestDigestInfos = new ConcurrentDictionary(); + private static readonly ConcurrentDictionary RequestDigestInfos = new ConcurrentDictionary(); #pragma warning restore CS0169 //private static bool hasAuthCookies; @@ -45,15 +45,15 @@ static ClientContextExtensions() { try { - ClientContextExtensions.userAgentFromConfig = ConfigurationManager.AppSettings["SharePointPnPUserAgent"]; + UserAgentFromConfig = ConfigurationManager.AppSettings["SharePointPnPUserAgent"]; } catch // throws exception if being called from a .NET Standard 2.0 application { - } - if (string.IsNullOrWhiteSpace(ClientContextExtensions.userAgentFromConfig)) + + if (string.IsNullOrWhiteSpace(UserAgentFromConfig)) { - ClientContextExtensions.userAgentFromConfig = Environment.GetEnvironmentVariable("SharePointPnPUserAgent", EnvironmentVariableTarget.Process); + UserAgentFromConfig = Environment.GetEnvironmentVariable("SharePointPnPUserAgent", EnvironmentVariableTarget.Process); } } #pragma warning restore CA1810 @@ -303,9 +303,9 @@ private static EventHandler AttachRequestUserAgent(string c } if (overrideUserAgent) { - if (string.IsNullOrEmpty(customUserAgent) && !string.IsNullOrEmpty(ClientContextExtensions.userAgentFromConfig)) + if (string.IsNullOrEmpty(customUserAgent) && !string.IsNullOrEmpty(UserAgentFromConfig)) { - customUserAgent = userAgentFromConfig; + customUserAgent = UserAgentFromConfig; } e.WebRequestExecutor.WebRequest.UserAgent = string.IsNullOrEmpty(customUserAgent) ? 
$"{PnPCoreUtilities.PnPCoreUserAgent}" : customUserAgent; } @@ -518,24 +518,32 @@ internal static ClientContext Clone(this ClientRuntimeContext clientContext, Cli /// /// Client context to check the pending requests for /// The number of pending requests + /// The currently loaded version of CSOM is not supported. public static int PendingRequestCount(this ClientRuntimeContext clientContext) { - int count = 0; + if (!clientContext.HasPendingRequest) + { + return 0; + } - if (clientContext.HasPendingRequest) + PropertyInfo property = PendingRequestActionsProperty.Value; + if (property == null) { - var result = clientContext.PendingRequest.GetType().GetProperty("Actions", BindingFlags.GetProperty | BindingFlags.Instance | BindingFlags.NonPublic); - if (result != null) - { - var propValue = result.GetValue(clientContext.PendingRequest); - if (propValue != null) - { - count = (propValue as List).Count; - } - } + return 0; } - return count; + object rawValue = property.GetValue(clientContext.PendingRequest); + switch (rawValue) + { + case ICollection actions: + return actions.Count; + + case null: + return 0; + + default: + throw new NotSupportedException("The currently loaded version of CSOM is not supported."); + } } /// @@ -783,20 +791,20 @@ public static async Task GetRequestDigestAsync(this ClientContext contex if (cookieContainer != null) { var hostUrl = context.Url; - if (requestDigestInfos.TryGetValue(hostUrl, out (string digestToken, DateTime expiresOn) requestDigestInfo)) + if (RequestDigestInfos.TryGetValue(hostUrl, out (string digestToken, DateTime expiresOn) requestDigestInfo)) { // We only have to add a request digest when running in dotnet core if (DateTime.Now > requestDigestInfo.expiresOn) { requestDigestInfo = await GetRequestDigestInfoAsync(hostUrl, cookieContainer); - requestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); + RequestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => 
requestDigestInfo); } } else { // admin url maybe? requestDigestInfo = await GetRequestDigestInfoAsync(hostUrl, cookieContainer); - requestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); + RequestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); } return requestDigestInfo.digestToken; } @@ -856,20 +864,20 @@ public static async Task GetRequestDigestAsync(this ClientContext contex public static async Task GetRequestDigestAsync(this ClientContext context) { var hostUrl = context.Url; - if (requestDigestInfos.TryGetValue(hostUrl, out (string digestToken, DateTime expiresOn) requestDigestInfo)) + if (RequestDigestInfos.TryGetValue(hostUrl, out (string digestToken, DateTime expiresOn) requestDigestInfo)) { // We only have to add a request digest when running in dotnet core if (DateTime.Now > requestDigestInfo.expiresOn) { requestDigestInfo = await GetRequestDigestInfoAsync(context); - requestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); + RequestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); } } else { // admin url maybe? 
requestDigestInfo = await GetRequestDigestInfoAsync(context); - requestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); + RequestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); } return requestDigestInfo.digestToken; } @@ -932,20 +940,20 @@ public static async Task GetRequestDigestAsync(this ClientContext contex internal static async Task GetOnPremisesRequestDigestAsync(this ClientContext context) { var hostUrl = context.Url; - if (requestDigestInfos.TryGetValue(hostUrl, out (string digestToken, DateTime expiresOn) requestDigestInfo)) + if (RequestDigestInfos.TryGetValue(hostUrl, out (string digestToken, DateTime expiresOn) requestDigestInfo)) { // We only have to add a request digest when running in dotnet core if (DateTime.Now > requestDigestInfo.expiresOn) { requestDigestInfo = await GetOnPremisesRequestDigestInfoAsync(context); - requestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); + RequestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); } } else { // admin url maybe? 
requestDigestInfo = await GetOnPremisesRequestDigestInfoAsync(context); - requestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); + RequestDigestInfos.AddOrUpdate(hostUrl, requestDigestInfo, (key, oldValue) => requestDigestInfo); } return requestDigestInfo.digestToken; } @@ -1181,5 +1189,10 @@ internal static CookieContainer GetAuthenticationCookies(this ClientContext cont } return authCookiesContainer; } + + private static PropertyInfo GetPendingRequestActionsProperty() + { + return typeof(ClientRequest).GetProperty("Actions", BindingFlags.GetProperty | BindingFlags.Instance | BindingFlags.NonPublic); + } } } From 08ed8d3eb46eebf7987013e8207ae2b35e908e47 Mon Sep 17 00:00:00 2001 From: fzbm <52308785+fzbm@users.noreply.github.com> Date: Mon, 15 Jan 2024 14:27:07 +0100 Subject: [PATCH 2/8] Using "OfType" instead of "Where" and "Cast" "ProvisionObjects" of "ObjectSiteSecurity" now uses "OfType" instead of a combination of "Where" and "Cast". --- .../Provisioning/ObjectHandlers/ObjectSiteSecurity.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/ObjectSiteSecurity.cs b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/ObjectSiteSecurity.cs index ef2791859..eb4c86f6e 100644 --- a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/ObjectSiteSecurity.cs +++ b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/ObjectSiteSecurity.cs @@ -267,7 +267,7 @@ public override TokenParser ProvisionObjects(Web web, ProvisioningTemplate templ } } - IEnumerable associatedGroupTokens = parser.Tokens.Where(t => t.GetType() == typeof(AssociatedGroupToken)).Cast(); + IEnumerable associatedGroupTokens = parser.Tokens.OfType(); foreach (AssociatedGroupToken associatedGroupToken in associatedGroupTokens) { associatedGroupToken.ClearCache(); From 8ac1dfbc2de0a206e995b952f6b4d4a6820d6a34 Mon Sep 17 00:00:00 2001 From: fzbm <52308785+fzbm@users.noreply.github.com> Date: Mon, 15 Jan 2024 
14:36:32 +0100 Subject: [PATCH 3/8] Some optimizations to "TokenDefinition" The following changes to "TokenDefinition" have been made to optimize the general performance when working with those. * The maximum token length now gets determined once when creating the "TokenDefinition" instance. Tokens cannot be changed and therefore determining the maximum length dynamically when calling "GetTokenLength" is not necessary. Also switched from LINQ to a "Math.Max" approach. * A list of tokens processed by "Regex.Unescape" gets created when creating the "TokenDefinition" instance. This list is also exposed by the new "GetUnescapedTokens" method. * Added a property to return the number of tokens. * Removed the remaining "this" references, which aligns the affected code with the rest of the class. --- .../TokenDefinitions/TokenDefinition.cs | 81 +++++++++++++------ 1 file changed, 57 insertions(+), 24 deletions(-) diff --git a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/TokenDefinition.cs b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/TokenDefinition.cs index 71b4d7c12..706373a66 100644 --- a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/TokenDefinition.cs +++ b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/TokenDefinition.cs @@ -1,6 +1,6 @@ using Microsoft.SharePoint.Client; using System; -using System.Linq; +using System.Collections.Generic; using System.Text.RegularExpressions; namespace PnP.Framework.Provisioning.ObjectHandlers.TokenDefinitions @@ -10,26 +10,17 @@ namespace PnP.Framework.Provisioning.ObjectHandlers.TokenDefinitions /// public abstract class TokenDefinition { - private bool _isCacheable = true; private ClientContext _context; protected string CacheValue; private readonly string[] _tokens; + private readonly string[] _unescapedTokens; + private readonly int _maximumTokenLength; /// /// Defines if a token is cacheable and should be added to the token cache during
initialization of the token parser. This means that the value for a token will be returned from the cache instead from the GetReplaceValue during the provisioning run. Defaults to true. /// - public bool IsCacheable - { - get - { - return _isCacheable; - } - set - { - _isCacheable = value; - } + public bool IsCacheable { get; set; } = true; - } /// /// Constructor /// @@ -37,8 +28,10 @@ public bool IsCacheable /// token public TokenDefinition(Web web, params string[] token) { - this._tokens = token; - this.Web = web; + Web = web; + _tokens = token; + _unescapedTokens = GetUnescapedTokens(token); + _maximumTokenLength = GetMaximumTokenLength(token); } /// @@ -58,6 +51,14 @@ public ClientContext TokenContext } } + /// + /// Gets the amount of tokens hold by this token definition + /// + public int TokenCount + { + get => _tokens.Length; + } + /// /// Gets tokens /// @@ -67,7 +68,15 @@ public string[] GetTokens() return _tokens; } - // public string[] Token { get; private set; } + /// + /// Gets the by processed tokens + /// + /// Returns array string of by processed tokens + public IReadOnlyList GetUnescapedTokens() + { + return _unescapedTokens; + } + /// /// Web is a SiteCollection or SubSite /// @@ -80,16 +89,16 @@ public string[] GetTokens() [Obsolete("No longer in use")] public Regex[] GetRegex() { - var regexs = new Regex[this._tokens.Length]; - for (var q = 0; q < this._tokens.Length; q++) + var regexs = new Regex[_tokens.Length]; + for (var q = 0; q < _tokens.Length; q++) { - regexs[q] = new Regex(this._tokens[q], RegexOptions.IgnoreCase); + regexs[q] = new Regex(_tokens[q], RegexOptions.IgnoreCase); } return regexs; } /// - /// Gets regular expressionf for the given token + /// Gets regular expression for the given token /// /// token string /// Returns RegularExpression @@ -100,12 +109,12 @@ public Regex GetRegexForToken(string token) } /// - /// Gets token length in integer + /// Gets the length of the largest token /// - /// token length in integer + /// 
Length of the largest token public int GetTokenLength() { - return _tokens.Select(t => t.Length).Concat(new[] { 0 }).Max(); + return _maximumTokenLength; } /// @@ -119,7 +128,31 @@ public int GetTokenLength() /// public void ClearCache() { - this.CacheValue = null; + CacheValue = null; + } + + private static int GetMaximumTokenLength(IReadOnlyList tokens) + { + var result = 0; + + for (var index = 0; index < tokens.Count; index++) + { + result = Math.Max(result, tokens[index].Length); + } + + return result; + } + + private static string[] GetUnescapedTokens(IReadOnlyList tokens) + { + var result = new string[tokens.Count]; + + for (var index = 0; index < tokens.Count; index++) + { + result[index] = Regex.Unescape(tokens[index]); + } + + return result; } } } \ No newline at end of file From f60977305d7814b38af055bda5a35f22a49c2b7b Mon Sep 17 00:00:00 2001 From: fzbm <52308785+fzbm@users.noreply.github.com> Date: Mon, 15 Jan 2024 14:38:22 +0100 Subject: [PATCH 4/8] Some optimizations to "SimpleTokenDefinition" The following changes to "SimpleTokenDefinition" have been made to optimize the general performance when working with those. * The maximum token length now gets determined once when creating the "SimpleTokenDefinition" instance. Tokens cannot be changed and therefore determining the maximum length dynamically when calling "GetTokenLength" is not necessary. Also switched from LINQ to a "Math.Max" approach. * A list of tokens processed by "Regex.Unescape" gets created when creating the "SimpleTokenDefinition" instance. This list is also exposed by the new "GetUnescapedTokens" method. * Added a property to return the number of tokens. * Removed the remaining "this" references, which aligns the affected code with the rest of the class.
--- .../ObjectHandlers/SimpleTokenParser.cs | 153 ++++++------------ .../TokenDefinitions/SimpleTokenDefinition.cs | 57 ++++++- 2 files changed, 103 insertions(+), 107 deletions(-) diff --git a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/SimpleTokenParser.cs b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/SimpleTokenParser.cs index 7ba12d5c9..6b3a56c4d 100644 --- a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/SimpleTokenParser.cs +++ b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/SimpleTokenParser.cs @@ -1,8 +1,8 @@ -using PnP.Framework.Provisioning.ObjectHandlers.TokenDefinitions; -using System; +using System; using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; +using PnP.Framework.Provisioning.ObjectHandlers.TokenDefinitions; namespace PnP.Framework.Provisioning.ObjectHandlers { @@ -11,24 +11,14 @@ namespace PnP.Framework.Provisioning.ObjectHandlers /// internal class SimpleTokenParser { - private List _tokens; + private List _tokens = new List(); - public SimpleTokenParser() - { - _tokens = new List(); - } - - /// - /// List of token definitions - /// - public List Tokens - { - get { return _tokens; } - private set - { - _tokens = value; - } - } + private readonly Dictionary _tokenDictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); + + private static readonly Regex ReToken = new Regex(@"(?:(\{(?:\1??[^{]*?\})))", RegexOptions.Compiled | RegexOptions.IgnoreCase); + private static readonly Regex ReTokenFallback = new Regex(@"\{.*?\}", RegexOptions.Compiled); + private static readonly Regex ReGuid = new Regex("(?\\{\\S{8}-\\S{4}-\\S{4}-\\S{4}-\\S{12}?\\})", RegexOptions.Compiled); + private static readonly char[] TokenChars = { '{', '~' }; /// /// adds token definition @@ -36,15 +26,11 @@ private set /// A TokenDefinition object public void AddToken(SimpleTokenDefinition tokenDefinition) { - _tokens.Add(tokenDefinition); - // ORDER IS IMPORTANT! 
- var sortedTokens = from t in _tokens - orderby t.GetTokenLength() descending - select t; + AddToTokenCache(tokenDefinition); - _tokens = sortedTokens.ToList(); - BuildTokenCache(); + // ORDER IS IMPORTANT! + _tokens = _tokens.OrderByDescending(d => d.GetTokenLength()).ToList(); } /// @@ -57,50 +43,6 @@ public string ParseString(string input) return ParseString(input, null); } - static readonly Regex ReGuid = new Regex("(?\\{\\S{8}-\\S{4}-\\S{4}-\\S{4}-\\S{12}?\\})", RegexOptions.Compiled); - /// - /// Gets left over tokens - /// - /// input string - /// Returns collections of left over tokens - public static IEnumerable GetLeftOverTokens(string input) - { - List values = new List(); - var matches = ReGuid.Matches(input).OfType().Select(m => m.Value); - foreach (var match in matches) - { - Guid gout; - if (!Guid.TryParse(match, out gout)) - { - values.Add(match); - } - } - return values; - } - - - private void BuildTokenCache() - { - foreach (var tokenDefinition in _tokens) - { - foreach (string token in tokenDefinition.GetTokens()) - { - var tokenKey = Regex.Unescape(token); - if (TokenDictionary.ContainsKey(tokenKey)) continue; - - string value = tokenDefinition.GetReplaceValue(); - - TokenDictionary[tokenKey] = value; - } - } - } - - private static readonly Regex ReToken = new Regex(@"(?:(\{(?:\1??[^{]*?\})))", RegexOptions.Compiled | RegexOptions.IgnoreCase); - private static readonly Regex ReTokenFallback = new Regex(@"\{.*?\}", RegexOptions.Compiled); - - private static readonly char[] TokenChars = { '{', '~' }; - private readonly Dictionary TokenDictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); - /// /// Parses given string /// @@ -109,14 +51,13 @@ private void BuildTokenCache() /// Returns parsed string public string ParseString(string input, params string[] tokensToSkip) { - if (string.IsNullOrWhiteSpace(input)) return input; - - if (string.IsNullOrEmpty(input) || input.IndexOfAny(TokenChars) == -1) return input; - - BuildTokenCache(); + if 
(string.IsNullOrWhiteSpace(input) || input.IndexOfAny(TokenChars) == -1) + { + return input; + } // Optimize for direct match with string search - if (TokenDictionary.TryGetValue(input, out string directMatch)) + if (_tokenDictionary.TryGetValue(input, out string directMatch)) { return directMatch; } @@ -130,60 +71,68 @@ public string ParseString(string input, params string[] tokensToSkip) output = ReToken.Replace(output, match => { string tokenString = match.Groups[0].Value; - if (TokenDictionary.TryGetValue(tokenString, out string val)) + + if (!_tokenDictionary.TryGetValue(tokenString, out string val)) { - hasMatch = true; - return val; + return tokenString; } - return match.Groups[0].Value; + + hasMatch = true; + return val; }); } while (hasMatch && input != output); - if (hasMatch) return output; + if (hasMatch) + { + return output; + } var fallbackMatches = ReTokenFallback.Matches(output); - if (fallbackMatches.Count == 0) return output; + if (fallbackMatches.Count == 0) + { + return output; + } // If all token constructs {...} are GUID's, we can skip the expensive fallback bool needFallback = false; foreach (Match match in fallbackMatches) { - if (!ReGuid.IsMatch(match.Value)) needFallback = true; + if (!ReGuid.IsMatch(match.Value)) + { + needFallback = true; + } + } + + if (!needFallback) + { + return output; } - if (!needFallback) return output; // Fallback for tokens which may contain { or } as part of their name - foreach (var pair in TokenDictionary) + foreach (var pair in _tokenDictionary) { int idx = output.IndexOf(pair.Key, StringComparison.CurrentCultureIgnoreCase); if (idx != -1) { output = output.Remove(idx, pair.Key.Length).Insert(idx, pair.Value); } - if (!ReTokenFallback.IsMatch(output)) break; + + if (!ReTokenFallback.IsMatch(output)) + { + break; + } } + return output; } - internal void RemoveToken(T oldToken) where T : TokenDefinition + private void AddToTokenCache(SimpleTokenDefinition definition) { - for (int i = 0; i < _tokens.Count; i++) 
+ IReadOnlyList tokens = definition.GetUnescapedTokens(); + for (var index = 0; index < tokens.Count; index++) { - var tokenDefinition = _tokens[i]; - if (tokenDefinition.GetTokens().SequenceEqual(oldToken.GetTokens())) - { - _tokens.RemoveAt(i); - - foreach (string token in tokenDefinition.GetTokens()) - { - var tokenKey = Regex.Unescape(token); - TokenDictionary.Remove(tokenKey); - } - - break; - } + _tokenDictionary[tokens[index]] = definition.GetReplaceValue(); } } } } - diff --git a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/SimpleTokenDefinition.cs b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/SimpleTokenDefinition.cs index 36e5d9579..b69221065 100644 --- a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/SimpleTokenDefinition.cs +++ b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/SimpleTokenDefinition.cs @@ -1,4 +1,6 @@ -using System.Linq; +using System.Collections.Generic; +using System; +using System.Text.RegularExpressions; namespace PnP.Framework.Provisioning.ObjectHandlers.TokenDefinitions { @@ -9,14 +11,26 @@ public abstract class SimpleTokenDefinition { protected string CacheValue; private readonly string[] _tokens; + private readonly string[] _unescapedTokens; + private readonly int _maximumTokenLength; /// /// Constructor - /// + /// /// token public SimpleTokenDefinition(params string[] token) { - this._tokens = token; + _tokens = token; + _unescapedTokens = GetUnescapedTokens(token); + _maximumTokenLength = GetMaximumTokenLength(token); + } + + /// + /// Gets the amount of tokens hold by this token definition + /// + public int TokenCount + { + get => _tokens.Length; } /// @@ -28,13 +42,22 @@ public string[] GetTokens() return _tokens; } + /// + /// Gets the by processed tokens + /// + /// Returns array string of by processed tokens + public IReadOnlyList GetUnescapedTokens() + { + return _unescapedTokens; + } + /// /// Gets token length in integer /// /// token 
length in integer public int GetTokenLength() { - return _tokens.Select(t => t.Length).Concat(new[] { 0 }).Max(); + return _maximumTokenLength; } /// @@ -48,7 +71,31 @@ public int GetTokenLength() /// public void ClearCache() { - this.CacheValue = null; + CacheValue = null; + } + + private static int GetMaximumTokenLength(IReadOnlyList tokens) + { + var result = 0; + + for (var index = 0; index < tokens.Count; index++) + { + result = Math.Max(result, tokens[index].Length); + } + + return result; + } + + private static string[] GetUnescapedTokens(IReadOnlyList tokens) + { + var result = new string[tokens.Count]; + + for (var index = 0; index < tokens.Count; index++) + { + result[index] = Regex.Unescape(tokens[index]); + } + + return result; } } } \ No newline at end of file From 811fa2c2efd067880e52f52b334ff729da00c746 Mon Sep 17 00:00:00 2001 From: fzbm <52308785+fzbm@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:03:16 +0100 Subject: [PATCH 5/8] Several optimizations to "TokenParser" The "TokenParser" had some performance issues and required a rework of some parts of the code. The main changes are: * The token cache now only gets created when using "Rebase", the private constructor (for cloning) and creating the instance. * Added a cache for non-cacheable "TokenDefinition" containing the actual "TokenDefinition" instead of the value. This has been added to eliminate the need to create a dictionary with all non-cacheable definitions most of the times "ParseString" gets called. * The normal and non-cacheable definitions cache gets updated by just processing the affected "TokenDefinition" when calling "AddToken" or "RebuildListTokens". In most cases the cache dictionaries are initialized with a calculated capacity to avoid resizing operations. * Changed "AddResourceTokens" to use a dictionary instead of a list to avoid costly lookup operations when adding "LocalizationToken".
Additionally the capacity of the "_tokens" list gets extended to a calculated amount beforehand to avoid resizing operations. * "GetListTitleForMainLanguage" and "_listsTitles" are now instance members. This fixes issues with environments where multiple threads might deploy a template at the same time. * "GetListTitleForMainLanguage" now uses a batch approach for retrieving the titles of the lists in order to reduce the time and amount of server calls required to retrieve them. * Removed the sorting of "_tokens" when adding a token or creating a "TokenParser" instance. It looks like sorting the list is not/no longer required. This might be added back later. Additional changes have been made: * Formatted some code to enhance the readability. * Replaced some LINQ methods with native methods provided by the collection type. * Changed the code of some methods to exit them early, which reduces the nesting. * Removed unused parameters from private methods. * Removed some commented code. * Moved all variable declarations to the top of the class. 
--- .../TokenDefinitions/LocalizationToken.cs | 54 +- .../TokenDefinitions/ResourceEntry.cs | 2 +- .../ObjectHandlers/TokenParser.cs | 943 ++++++++++-------- 3 files changed, 575 insertions(+), 424 deletions(-) diff --git a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/LocalizationToken.cs b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/LocalizationToken.cs index e39403667..26a843b93 100644 --- a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/LocalizationToken.cs +++ b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/LocalizationToken.cs @@ -1,8 +1,7 @@ -using Microsoft.SharePoint.Client; -using PnP.Framework.Attributes; using System.Collections.Generic; -using System.Linq; using System.Text.RegularExpressions; +using Microsoft.SharePoint.Client; +using PnP.Framework.Attributes; namespace PnP.Framework.Provisioning.ObjectHandlers.TokenDefinitions { @@ -13,45 +12,38 @@ namespace PnP.Framework.Provisioning.ObjectHandlers.TokenDefinitions Returns = "My List Title")] internal class LocalizationToken : TokenDefinition { - private readonly List _resourceEntries; - private readonly int? _defaultLCID; + private readonly int _webLanguage; + private readonly int? _defaultLcid; + private readonly Dictionary _entriesByLanguage; - public LocalizationToken(Web web, string key, List resourceEntries, int? defaultLCID) + public IReadOnlyList ResourceEntries { get; } + + public LocalizationToken(Web web, string key, List resourceEntries, int? 
defaultLcid) : base(web, $"{{loc:{Regex.Escape(key)}}}", $"{{localize:{Regex.Escape(key)}}}", $"{{localization:{Regex.Escape(key)}}}", $"{{resource:{Regex.Escape(key)}}}", $"{{res:{Regex.Escape(key)}}}") { - _resourceEntries = resourceEntries; - _defaultLCID = defaultLCID; + ResourceEntries = resourceEntries; + _defaultLcid = defaultLcid; + _webLanguage = (int)web.Language; + _entriesByLanguage = new Dictionary(capacity: resourceEntries.Count + 1); + + for (var index = 0; index < resourceEntries.Count; index++) + { + var entry = resourceEntries[index]; + _entriesByLanguage[entry.LCID] = entry; + } } public override string GetReplaceValue() { - var entry = _resourceEntries.FirstOrDefault(r => r.LCID == this.Web.Language); - if (entry != null) + if (_entriesByLanguage.TryGetValue(_webLanguage, out ResourceEntry entry) + // Fallback to default LCID. + || (_defaultLcid.HasValue && _entriesByLanguage.TryGetValue(_defaultLcid.Value, out entry))) { return entry.Value; } - else - { - // fallback to default LCID or to the first resource string - var defaultEntry = _defaultLCID.HasValue ? - _resourceEntries.FirstOrDefault(r => r.LCID == _defaultLCID) : - _resourceEntries.First(); - - if (defaultEntry != null) - { - return defaultEntry.Value; - } - else - { - return _resourceEntries.First().Value; //fallback to old logic as for me _defaultLCID has always a Value i.e. 0 or the correct LCID - } - } - } - - public List ResourceEntries - { - get { return _resourceEntries; } + // Fallback to old logic as for me _defaultLCID has always a Value i.e. 0 or the correct LCID. 
+ return ResourceEntries[0].Value; } } } \ No newline at end of file diff --git a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/ResourceEntry.cs b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/ResourceEntry.cs index d51767da5..559953a98 100644 --- a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/ResourceEntry.cs +++ b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenDefinitions/ResourceEntry.cs @@ -2,7 +2,7 @@ { internal class ResourceEntry { - public uint LCID { get; set; } + public int LCID { get; set; } public string Value { get; set; } } } diff --git a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenParser.cs b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenParser.cs index ddd0667d3..7bfe81a48 100644 --- a/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenParser.cs +++ b/src/lib/PnP.Framework/Provisioning/ObjectHandlers/TokenParser.cs @@ -1,4 +1,10 @@ -using Microsoft.Online.SharePoint.TenantAdministration; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text.RegularExpressions; +using System.Xml.Linq; +using Microsoft.Online.SharePoint.TenantAdministration; using Microsoft.SharePoint.Client; using Microsoft.SharePoint.Client.Taxonomy; using Newtonsoft.Json; @@ -9,14 +15,6 @@ using PnP.Framework.Provisioning.Model; using PnP.Framework.Provisioning.ObjectHandlers.TokenDefinitions; using PnP.Framework.Utilities; -using System; -using System.Collections.Generic; -using System.Globalization; -using System.Linq; -using System.Text.RegularExpressions; -using System.Xml.Linq; -//using System.Web.UI.WebControls.WebParts; -//using System.Web.UI.WebControls; namespace PnP.Framework.Provisioning.ObjectHandlers { @@ -25,11 +23,25 @@ namespace PnP.Framework.Provisioning.ObjectHandlers /// public class TokenParser : ICloneable { - public Web _web; - + private Web _web; private List _tokens; + private Dictionary 
_tokenDictionary; + private Dictionary _nonCacheableTokenDictionary; + private Dictionary _listTokenDictionary; + private readonly Dictionary _listsTitles = new Dictionary(StringComparer.OrdinalIgnoreCase); private readonly bool _initializedFromHierarchy; + private readonly Action _addTokenWithCacheUpdateDelegate; + private readonly Action _addTokenToListDelegate; + + // First group supports tokens in form '{param:value}' , second group supports nested parameters in form '{param:{xxx..' + private static readonly Regex ReToken = new Regex(@"(?:(\{(?:\1??[^{]*?\})))|(?:(\{(?:\1??[^{]*?:)))", RegexOptions.Compiled | RegexOptions.IgnoreCase); + private static readonly Regex ReTokenFallback = new Regex(@"\{.*?\}", RegexOptions.Compiled); + private static readonly Regex ReGuid = new Regex("(?\\{\\S{8}-\\S{4}-\\S{4}-\\S{4}-\\S{12}?\\})", RegexOptions.Compiled); + private static readonly char[] InternalTokenDelimiters = { ':' }; + private static readonly char[] TokenChars = { '{', '~' }; + private static readonly char[] TokenBoundaryChars = { '{', '}' }; + private static readonly char[] UrlSeparators = { '/' }; /// /// List of token definitions @@ -49,14 +61,7 @@ private set /// A TokenDefinition object public void AddToken(TokenDefinition tokenDefinition) { - _tokens.Add(tokenDefinition); - // ORDER IS IMPORTANT! 
- var sortedTokens = from t in _tokens - orderby t.GetTokenLength() descending - select t; - - _tokens = sortedTokens.ToList(); - BuildTokenCache(); + AddTokenWithCacheUpdate(tokenDefinition); } // Lightweight rebase @@ -67,46 +72,58 @@ public void Rebase(Web web) token.ClearCache(); } } + // Heavy rebase for switching templates public void Rebase(Web web, ProvisioningTemplate template, ProvisioningTemplateApplyingInformation applyingInformation = null) { var tokenIds = ParseTemplate(template); _web = web; - foreach (var token in _tokens.Where(t => t is VolatileTokenDefinition)) + foreach (var token in _tokens.OfType()) { - ((VolatileTokenDefinition)token).ClearVolatileCache(web); + token.ClearVolatileCache(web); } - _tokens.RemoveAll(t => t is SiteToken); + _tokens.RemoveAll(t => t is SiteToken); _tokens.Add(new SiteToken(web)); // remove list tokens - if (tokenIds.Contains("listid") || tokenIds.Contains("listurl") || tokenIds.Contains("viewid") || tokenIds.Contains("listcontenttypeid")) + if (tokenIds.Contains("listid") + || tokenIds.Contains("listurl") + || tokenIds.Contains("viewid") + || tokenIds.Contains("listcontenttypeid")) { - RebuildListTokens(web); + RebuildListTokens(web, _addTokenToListDelegate); } + // remove content type tokens if (tokenIds.Contains("contenttypeid")) AddContentTypeTokens(web); + // remove field tokens if (tokenIds.Contains("fieldid")) { _tokens.RemoveAll(t => t is FieldTitleToken || t is FieldIdToken); AddFieldTokens(web); } + // remove group tokens - if (tokenIds.Contains("groupid") || tokenIds.FirstOrDefault(t => t.StartsWith("associated")) != null) + if (tokenIds.Contains("groupid") + || tokenIds.FindIndex(t => t.StartsWith("associated", StringComparison.OrdinalIgnoreCase)) > -1) { _tokens.RemoveAll(t => t is GroupIdToken || t is AssociatedGroupToken); - AddGroupTokens(web, applyingInformation); + AddGroupTokens(web); } + // remove role definition tokens if (tokenIds.Contains("roledefinition")) { _tokens.RemoveAll(t => t is 
RoleDefinitionToken || t is RoleDefinitionIdToken); AddRoleDefinitionTokens(web); } + + CalculateTokenCount(_tokens, out int cacheableCount, out int nonCacheableCount); + BuildTokenCache(cacheableCount, nonCacheableCount); } /// @@ -118,36 +135,46 @@ private TokenParser(Web web, List tokens) { _web = web; _tokens = tokens; + _addTokenWithCacheUpdateDelegate = AddTokenWithCacheUpdate; + _addTokenToListDelegate = AddTokenToList; + + CalculateTokenCount(_tokens, out int cacheableCount, out int nonCacheableCount); + BuildTokenCache(cacheableCount, nonCacheableCount); } - public TokenParser(Tenant tenant, Model.ProvisioningHierarchy hierarchy) : + public TokenParser(Tenant tenant, ProvisioningHierarchy hierarchy) : this(tenant, hierarchy, null) { } - public TokenParser(Tenant tenant, Model.ProvisioningHierarchy hierarchy, ProvisioningTemplateApplyingInformation applyingInformation) + public TokenParser(Tenant tenant, ProvisioningHierarchy hierarchy, ProvisioningTemplateApplyingInformation applyingInformation) { - // CHANGED: To avoid issues with low privilege users - Web web = null; + _addTokenWithCacheUpdateDelegate = AddTokenWithCacheUpdate; + _addTokenToListDelegate = AddTokenToList; + // CHANGED: To avoid issues with low privilege users + Web web; if (TenantExtensions.IsCurrentUserTenantAdmin((ClientContext)tenant.Context)) { web = ((ClientContext)tenant.Context).Web; } else { - var rootSiteUrl = tenant.Context.Url.Replace("-admin", ""); + var rootSiteUrl = tenant.Context.Url.Replace("-admin", string.Empty); var context = ((ClientContext)tenant.Context).Clone(rootSiteUrl); web = context.Web; } web.EnsureProperties(w => w.ServerRelativeUrl, w => w.Url, w => w.Language); + _web = web; _tokens = new List(); - foreach (var parameter in hierarchy.Parameters) + + foreach (KeyValuePair parameter in hierarchy.Parameters) { _tokens.Add(new ParameterToken(null, parameter.Key, parameter.Value ?? 
string.Empty)); } + _tokens.Add(new GuidToken(null)); _tokens.Add(new CurrentUserIdToken(web)); _tokens.Add(new CurrentUserLoginNameToken(web)); @@ -155,7 +182,12 @@ public TokenParser(Tenant tenant, Model.ProvisioningHierarchy hierarchy, Provisi _tokens.Add(new AuthenticationRealmToken(web)); _tokens.Add(new HostUrlToken(web)); _tokens.Add(new FqdnToken(web)); + AddResourceTokens(web, hierarchy.Localizations, hierarchy.Connector); + + CalculateTokenCount(_tokens, out int cacheableCount, out int nonCacheableCount); + BuildTokenCache(cacheableCount, nonCacheableCount); + _initializedFromHierarchy = true; } @@ -182,8 +214,9 @@ public TokenParser(Web web, ProvisioningTemplate template, ProvisioningTemplateA web.EnsureProperties(w => w.ServerRelativeUrl, w => w.Url, w => w.Language); _web = web; - _tokens = new List(); + _addTokenWithCacheUpdateDelegate = AddTokenWithCacheUpdate; + _addTokenToListDelegate = AddTokenToList; if (tokenIds.Contains("sitecollection")) _tokens.Add(new SiteCollectionToken(web)); @@ -249,23 +282,19 @@ public TokenParser(Web web, ProvisioningTemplate template, ProvisioningTemplateA _tokens.Add(new EveryoneButExternalUsersToken(web)); if (tokenIds.Contains("listid") || tokenIds.Contains("listurl") || tokenIds.Contains("viewid")) - RebuildListTokens(web); + RebuildListTokens(web, _addTokenToListDelegate); if (tokenIds.Contains("contenttypeid")) AddContentTypeTokens(web); - if (!_initializedFromHierarchy) + if (!_initializedFromHierarchy && tokenIds.Contains("parameter")) { - if (tokenIds.Contains("parameter")) + // Add parameters + foreach (var parameter in template.Parameters) { - // Add parameters - foreach (var parameter in template.Parameters) - { - _tokens.Add(new ParameterToken(web, parameter.Key, parameter.Value ?? string.Empty)); - } + _tokens.Add(new ParameterToken(web, parameter.Key, parameter.Value ?? 
string.Empty)); } } - if (tokenIds.Contains("sitedesignid")) AddSiteDesignTokens(web, applyingInformation); if (tokenIds.Contains("sitescriptid")) @@ -287,109 +316,111 @@ public TokenParser(Web web, ProvisioningTemplate template, ProvisioningTemplateA // Groups if (tokenIds.Contains("groupid")) - AddGroupTokens(web, applyingInformation); + AddGroupTokens(web); // AppPackages tokens if (tokenIds.Contains("apppackageid")) AddAppPackagesTokens(web); if (tokenIds.Contains("pageuniqueid")) - AddPageUniqueIdTokens(web, applyingInformation); + AddPageUniqueIdTokens(web); if (tokenIds.Contains("propertybagvalue")) AddPropertyBagTokens(web); // TermStore related tokens AddTermStoreTokens(web, tokenIds); - var sortedTokens = from t in _tokens - orderby t.GetTokenLength() descending - select t; - - _tokens = sortedTokens.ToList(); + CalculateTokenCount(_tokens, out int cacheableCount, out int nonCacheableCount); + BuildTokenCache(cacheableCount, nonCacheableCount); } private void AddResourceTokens(Web web, LocalizationCollection localizations, FileConnectorBase connector) { + if (localizations == null || localizations.Count == 0) + { + return; + } - if (localizations != null && localizations.Any()) + if (connector == null) { - if (connector == null) - { - throw new ArgumentNullException(nameof(connector), "Template or Hierarchy File Connector cannot be null"); - } + throw new ArgumentNullException(nameof(connector), "Template or Hierarchy File Connector cannot be null"); + } + + //https://github.com/SharePoint/PnP-Provisioning-Schema/issues/301 + //fixing issue to allow multiple resx files in the template. 
i.e: + // + // + // + // + + var resourceEntries = new Dictionary>(StringComparer.InvariantCulture); + + foreach (var localizationEntry in localizations) + { + string filePath = localizationEntry.ResourceFile; + int lcid = localizationEntry.LCID; - //https://github.com/SharePoint/PnP-Provisioning-Schema/issues/301 - //fixing issue to allow multiple resx files in the template. i.e: - // - // - // - // - var resourcesFilesCount = localizations.GroupBy(l => l.Name).Count(); - - // Read all resource keys in a list - List> resourceEntries = new List>(); - foreach (var localizationEntry in localizations) + if (filePath.EndsWith(".resx", StringComparison.OrdinalIgnoreCase)) { - var filePath = localizationEntry.ResourceFile; - if (filePath.ToLower().EndsWith(".resx")) + using (var stream = connector.GetFileStream(filePath)) { - using (var stream = connector.GetFileStream(filePath)) + if (stream == null) { - if (stream != null) - { - // Use raw XML approach as the .Net Framework resxreader seems to choke on some resx files - // TODO: research this! - var xElement = XElement.Load(stream); - foreach (var dataElement in xElement.Descendants("data")) - { - var key = dataElement.Attribute("name").Value; - var value = dataElement.Descendants().First().Value; - resourceEntries.Add(new Tuple($"{localizationEntry.Name}:{key}", (uint)localizationEntry.LCID, value.ToString().Replace("\"", """))); - resourceEntries.Add(new Tuple(key.ToString(), (uint)localizationEntry.LCID, value.ToString().Replace("\"", """))); - } + continue; + } + + // Use raw XML approach as the .Net Framework resxreader seems to choke on some resx files + // TODO: research this! 
+ + var xElement = XElement.Load(stream); + foreach (var dataElement in xElement.Descendants("data")) + { + var key = dataElement.Attribute("name").Value; + var value = dataElement.Descendants().First().Value; + string escapedValue = value.Replace("\"", """); + + AddResourceEntry($"{localizationEntry.Name}:{key}", lcid, escapedValue, resourceEntries); + AddResourceEntry(key, lcid, escapedValue, resourceEntries); + } - /* - // are we talking a resx file or a json file? - if (filePath.ToLower().EndsWith(".resx")) + /* + using (ResXResourceReader resxReader = new ResXResourceReader(stream)) + { + foreach (DictionaryEntry entry in resxReader) { - using (ResXResourceReader resxReader = new ResXResourceReader(stream)) - { - foreach (DictionaryEntry entry in resxReader) - { - // One can have multiple resource files in a single file, by adding tokens with resource file name and without we allow both scenarios to resolve - resourceEntries.Add(new Tuple($"{localizationEntry.Name}:{entry.Key}", (uint)localizationEntry.LCID, entry.Value.ToString().Replace("\"", """))); - resourceEntries.Add(new Tuple(entry.Key.ToString(), (uint)localizationEntry.LCID, entry.Value.ToString().Replace("\"", """))); - } - } + // One can have multiple resource files in a single file, by adding tokens with resource file name and without we allow both scenarios to resolve + resourceEntries.Add(new Tuple($"{localizationEntry.Name}:{entry.Key}", (uint)localizationEntry.LCID, entry.Value.ToString().Replace("\"", """))); + resourceEntries.Add(new Tuple(entry.Key.ToString(), (uint)localizationEntry.LCID, entry.Value.ToString().Replace("\"", """))); } - */ } - } + */ } - else if (filePath.ToLower().EndsWith(".json")) + } + else if (filePath.EndsWith(".json", StringComparison.OrdinalIgnoreCase)) + { + var jsonString = connector.GetFile(filePath); + if (string.IsNullOrEmpty(jsonString)) { - var jsonString = connector.GetFile(filePath); - if (!string.IsNullOrEmpty(jsonString)) - { - var dict = 
System.Text.Json.JsonSerializer.Deserialize>(jsonString); - foreach (var entry in dict) - { - resourceEntries.Add(new Tuple($"{localizationEntry.Name}:{entry.Key}", (uint)localizationEntry.LCID, entry.Value.ToString().Replace("\"", """))); - resourceEntries.Add(new Tuple(entry.Key.ToString(), (uint)localizationEntry.LCID, entry.Value.ToString().Replace("\"", """))); - } - } + continue; } - } - var uniqueKeys = resourceEntries.Select(k => k.Item1).Distinct(); - foreach (var key in uniqueKeys) - { - var matches = resourceEntries.Where(k => k.Item1 == key); - var entries = matches.Select(k => new ResourceEntry() { LCID = k.Item2, Value = k.Item3 }).ToList(); - LocalizationToken token = new LocalizationToken(web, key, entries, localizations.DefaultLCID); + var dict = System.Text.Json.JsonSerializer.Deserialize>(jsonString); + foreach (KeyValuePair entry in dict) + { + string escapedValue = entry.Value.Replace("\"", """); - _tokens.Add(token); + AddResourceEntry($"{localizationEntry.Name}:{entry.Key}", lcid, escapedValue, resourceEntries); + AddResourceEntry(entry.Key, lcid, escapedValue, resourceEntries); + } } } + + // Ensure an additional capacity of at least the amount of tokens we want to add. 
+ _tokens.Capacity = _tokens.Count + resourceEntries.Count + 1; + + foreach (KeyValuePair> pair in resourceEntries) + { + _tokens.Add(new LocalizationToken(web, pair.Key, pair.Value, localizations.DefaultLCID)); + } } private void AddFieldTokens(Web web) @@ -398,8 +429,10 @@ private void AddFieldTokens(Web web) // Add all the site columns var fields = web.AvailableFields; + web.Context.Load(fields, flds => flds.Include(f => f.Title, f => f.InternalName, f => f.Id)); web.Context.ExecuteQueryRetry(); + foreach (var field in fields) { _tokens.Add(new FieldTitleToken(web, field.InternalName, field.Title)); @@ -436,10 +469,12 @@ private void AddFieldTokens(Web web) private void AddRoleDefinitionTokens(Web web) { web.EnsureProperty(w => w.RoleDefinitions.Include(r => r.RoleTypeKind, r => r.Name, r => r.Id)); + foreach (var roleDef in web.RoleDefinitions.AsEnumerable().Where(r => r.RoleTypeKind != RoleType.None)) { _tokens.Add(new RoleDefinitionToken(web, roleDef)); } + foreach (var roleDef in web.RoleDefinitions) { _tokens.Add(new RoleDefinitionIdToken(web, roleDef.Name, roleDef.Id)); @@ -456,13 +491,15 @@ private void AddPropertyBagTokens(Web web) } } - private void AddGroupTokens(Web web, ProvisioningTemplateApplyingInformation applyingInformation) + private void AddGroupTokens(Web web) { web.EnsureProperty(w => w.SiteGroups.Include(g => g.Title, g => g.Id)); + foreach (var siteGroup in web.SiteGroups) { _tokens.Add(new GroupIdToken(web, siteGroup.Title, siteGroup.Id.ToString())); } + web.EnsureProperty(w => w.AssociatedVisitorGroup).EnsureProperties(g => g.Id, g => g.Title); web.EnsureProperty(w => w.AssociatedMemberGroup).EnsureProperties(g => g.Id, g => g.Title); web.EnsureProperty(w => w.AssociatedOwnerGroup).EnsureProperties(g => g.Id, g => g.Title); @@ -471,119 +508,136 @@ private void AddGroupTokens(Web web, ProvisioningTemplateApplyingInformation app { _tokens.Add(new GroupIdToken(web, "associatedvisitorgroup", web.AssociatedVisitorGroup.Id.ToString())); } + if 
(!web.AssociatedMemberGroup.ServerObjectIsNull.Value) { _tokens.Add(new GroupIdToken(web, "associatedmembergroup", web.AssociatedMemberGroup.Id.ToString())); } + if (!web.AssociatedOwnerGroup.ServerObjectIsNull.Value) { _tokens.Add(new GroupIdToken(web, "associatedownergroup", web.AssociatedOwnerGroup.Id.ToString())); } - string accessToken = null; + if (PnPProvisioningContext.Current?.AcquireTokenAsync == null) + { + return; + } + + var microsoftGraphBaseUri = AuthenticationManager.GetGraphBaseEndPoint(web.Context.GetAzureEnvironment()); - if (PnPProvisioningContext.Current?.AcquireTokenAsync != null) + var accessToken = PnPProvisioningContext.Current.AcquireToken(microsoftGraphBaseUri.Authority, "Group.Read.All"); + if (string.IsNullOrEmpty(accessToken)) { - var microsoftGraphBaseUri = AuthenticationManager.GetGraphBaseEndPoint(web.Context.GetAzureEnvironment()); + return; + } - accessToken = PnPProvisioningContext.Current.AcquireToken(microsoftGraphBaseUri.Authority, "Group.Read.All"); - if (!string.IsNullOrEmpty(accessToken)) + try + { + // Get Office 365 Groups + var officeGroups = UnifiedGroupsUtility.GetUnifiedGroups(accessToken, includeSite: false); + foreach (var group in officeGroups) { - try - { - // Get Office 365 Groups - var officeGroups = UnifiedGroupsUtility.GetUnifiedGroups(accessToken, includeSite: false); - foreach (var group in officeGroups) - { - _tokens.Add(new O365GroupIdToken(web, group.DisplayName, group.GroupId)); - if (!group.DisplayName.Equals(group.MailNickname)) - { - _tokens.Add(new O365GroupIdToken(web, group.MailNickname, group.GroupId)); - } - } - } - catch (Microsoft.Graph.ServiceException ex) + _tokens.Add(new O365GroupIdToken(web, group.DisplayName, group.GroupId)); + if (!group.DisplayName.Equals(group.MailNickname)) { - // If we don't have permission to access the O365 groups, just skip it - Log.Warning(Constants.LOGGING_SOURCE, CoreResources.GraphExtensions_ErrorOccured, ex.Error.Message); + _tokens.Add(new 
O365GroupIdToken(web, group.MailNickname, group.GroupId)); } } } + catch (Microsoft.Graph.ServiceException ex) + { + // If we don't have permission to access the O365 groups, just skip it + Log.Warning(Constants.LOGGING_SOURCE, CoreResources.GraphExtensions_ErrorOccured, ex.Error.Message); + } } private void AddTermStoreTokens(Web web, List tokenIds) { - if (tokenIds.Contains("termstoreid") || tokenIds.Contains("termsetid") || tokenIds.Contains("sitecollectiontermgroupid") || tokenIds.Contains("sitecollectiontermgroupname") || tokenIds.Contains("sitecollectiontermsetid")) + if (!tokenIds.Contains("termstoreid") + && !tokenIds.Contains("termsetid") + && !tokenIds.Contains("sitecollectiontermgroupid") + && !tokenIds.Contains("sitecollectiontermgroupname") + && !tokenIds.Contains("sitecollectiontermsetid")) { - TaxonomySession session = TaxonomySession.GetTaxonomySession(web.Context); + return; + } + + TaxonomySession session = TaxonomySession.GetTaxonomySession(web.Context); - if (tokenIds.Contains("termstoreid")) + if (tokenIds.Contains("termstoreid")) + { + var termStores = session.EnsureProperty(s => s.TermStores); + foreach (var ts in termStores) { - var termStores = session.EnsureProperty(s => s.TermStores); - foreach (var ts in termStores) - { - _tokens.Add(new TermStoreIdToken(web, ts.Name, ts.Id)); - } + _tokens.Add(new TermStoreIdToken(web, ts.Name, ts.Id)); } - var termStore = session.GetDefaultSiteCollectionTermStore(); - web.Context.Load(termStore); - web.Context.ExecuteQueryRetry(); - if (tokenIds.Contains("termsetid")) + } + + var termStore = session.GetDefaultSiteCollectionTermStore(); + web.Context.Load(termStore); + web.Context.ExecuteQueryRetry(); + + if (tokenIds.Contains("termsetid")) + { + if (!termStore.ServerObjectIsNull.Value) { - if (!termStore.ServerObjectIsNull.Value) + web.Context.Load(termStore.Groups, + g => g.Include( + tg => tg.Name, + tg => tg.TermSets.Include( + ts => ts.Name, + ts => ts.Id) + )); + web.Context.ExecuteQueryRetry(); + 
+ foreach (var termGroup in termStore.Groups) { - web.Context.Load(termStore.Groups, - g => g.Include( - tg => tg.Name, - tg => tg.TermSets.Include( - ts => ts.Name, - ts => ts.Id) - )); - web.Context.ExecuteQueryRetry(); - foreach (var termGroup in termStore.Groups) + foreach (var termSet in termGroup.TermSets) { - foreach (var termSet in termGroup.TermSets) - { - _tokens.Add(new TermSetIdToken(web, termGroup.Name, termSet.Name, termSet.Id)); - } + _tokens.Add(new TermSetIdToken(web, termGroup.Name, termSet.Name, termSet.Id)); } } } + } - if (tokenIds.Contains("sitecollectiontermgroupid")) - _tokens.Add(new SiteCollectionTermGroupIdToken(web)); - if (tokenIds.Contains("sitecollectiontermgroupname")) - _tokens.Add(new SiteCollectionTermGroupNameToken(web)); + if (tokenIds.Contains("sitecollectiontermgroupid")) + _tokens.Add(new SiteCollectionTermGroupIdToken(web)); + + if (tokenIds.Contains("sitecollectiontermgroupname")) + _tokens.Add(new SiteCollectionTermGroupNameToken(web)); + + if (!tokenIds.Contains("sitecollectiontermsetid")) + { + return; + } - if (tokenIds.Contains("sitecollectiontermsetid")) + var site = (web.Context as ClientContext).Site; + var siteCollectionTermGroup = termStore.GetSiteCollectionGroup(site, true); + web.Context.Load(siteCollectionTermGroup); + + try + { + web.Context.ExecuteQueryRetry(); + if (null != siteCollectionTermGroup && !siteCollectionTermGroup.ServerObjectIsNull.Value) { - var site = (web.Context as ClientContext).Site; - var siteCollectionTermGroup = termStore.GetSiteCollectionGroup(site, true); - web.Context.Load(siteCollectionTermGroup); - try - { - web.Context.ExecuteQueryRetry(); - if (null != siteCollectionTermGroup && !siteCollectionTermGroup.ServerObjectIsNull.Value) - { - web.Context.Load(siteCollectionTermGroup, group => group.TermSets.Include(ts => ts.Name, ts => ts.Id)); - web.Context.ExecuteQueryRetry(); - foreach (var termSet in siteCollectionTermGroup.TermSets) - { - _tokens.Add(new 
SiteCollectionTermSetIdToken(web, termSet.Name, termSet.Id)); - } - } - } - catch (ServerUnauthorizedAccessException) + web.Context.Load(siteCollectionTermGroup, group => group.TermSets.Include(ts => ts.Name, ts => ts.Id)); + web.Context.ExecuteQueryRetry(); + foreach (var termSet in siteCollectionTermGroup.TermSets) { - // If we don't have permission to access the TermGroup, just skip it - Log.Warning(Constants.LOGGING_SOURCE, CoreResources.TermGroup_No_Access); - } - catch (NullReferenceException) - { - // If there isn't a default TermGroup for the Site Collection, we skip the terms in token handler + _tokens.Add(new SiteCollectionTermSetIdToken(web, termSet.Name, termSet.Id)); } } } + catch (ServerUnauthorizedAccessException) + { + // If we don't have permission to access the TermGroup, just skip it + Log.Warning(Constants.LOGGING_SOURCE, CoreResources.TermGroup_No_Access); + } + catch (NullReferenceException) + { + // If there isn't a default TermGroup for the Site Collection, we skip the terms in token handler + } } private void AddAppPackagesTokens(Web web) @@ -645,22 +699,26 @@ private void AddStorageEntityTokens(Web web) private static List ParseStorageEntitiesString(string storageEntitiesIndex) { var storageEntitiesDict = JsonConvert.DeserializeObject>>(storageEntitiesIndex); + if (storageEntitiesDict == null) + { + return new List(); + } + + var storageEntities = new List(storageEntitiesDict.Count + 1); - var storageEntities = new List(); - if (storageEntitiesDict != null) + foreach (KeyValuePair> pair in storageEntitiesDict) { - foreach (var key in storageEntitiesDict.Keys) + var storageEntity = new StorageEntity { - var storageEntity = new StorageEntity - { - Key = key, - Value = storageEntitiesDict[key]["Value"], - Comment = storageEntitiesDict[key]["Comment"], - Description = storageEntitiesDict[key]["Description"] - }; - storageEntities.Add(storageEntity); - } + Key = pair.Key, + Value = pair.Value["Value"], + Comment = pair.Value["Comment"], + 
Description = pair.Value["Description"] + }; + + storageEntities.Add(storageEntity); } + return storageEntities; } @@ -672,8 +730,10 @@ private void AddSiteDesignTokens(Web web, ProvisioningTemplateApplyingInformatio { var tenant = new Tenant(tenantContext); var designs = tenant.GetSiteDesigns(); + tenantContext.Load(designs); tenantContext.ExecuteQueryRetry(); + foreach (var design in designs) { _tokens.Add(new SiteDesignIdToken(web, design.Title, design.Id)); @@ -682,29 +742,31 @@ private void AddSiteDesignTokens(Web web, ProvisioningTemplateApplyingInformatio } catch { - } } - private void AddPageUniqueIdTokens(Web web, ProvisioningTemplateApplyingInformation applyingInformation) + private void AddPageUniqueIdTokens(Web web) { - var pagesList = web.GetListByUrl("SitePages", p => p.RootFolder); var query = new CamlQuery() { - ViewXml = $"100" + ViewXml = "100" }; + do { var items = pagesList.GetItems(query); + web.Context.Load(items); web.Context.ExecuteQueryRetry(); + foreach (var item in items) { _tokens.Add(new PageUniqueIdToken(web, $"SitePages/{item["FileLeafRef"]}", Guid.Parse(item["UniqueId"].ToString()))); _tokens.Add(new PageUniqueIdEncodedToken(web, $"SitePages/{item["FileLeafRef"]}", Guid.Parse(item["UniqueId"].ToString()))); } + query.ListItemCollectionPosition = items.ListItemCollectionPosition; } while (query.ListItemCollectionPosition != null); } @@ -717,8 +779,10 @@ private void AddSiteScriptTokens(Web web, ProvisioningTemplateApplyingInformatio { var tenant = new Tenant(tenantContext); var scripts = tenant.GetSiteScripts(); + tenantContext.Load(scripts); tenantContext.ExecuteQueryRetry(); + foreach (var script in scripts) { _tokens.Add(new SiteScriptIdToken(web, script.Title, script.Id)); @@ -727,7 +791,6 @@ private void AddSiteScriptTokens(Web web, ProvisioningTemplateApplyingInformatio } catch { - } } @@ -737,6 +800,7 @@ private void AddContentTypeTokens(Web web) web.Context.Load(web.AvailableContentTypes, cs => cs.Include(ct => ct.StringId, ct 
=> ct.Name)); web.Context.ExecuteQueryRetry(); + foreach (var ct in web.AvailableContentTypes) { _tokens.Add(new ContentTypeIdToken(web, ct.Name, ct.StringId)); @@ -744,69 +808,85 @@ private void AddContentTypeTokens(Web web) } internal void RebuildListTokens(Web web) + { + RebuildListTokens(web, _addTokenWithCacheUpdateDelegate); + } + + private void RebuildListTokens(Web web, Action addToken) { web.EnsureProperties(w => w.ServerRelativeUrl, w => w.Language); - //Remove tokens from TokenDictionary and ListTokenDictionary - Predicate listTokenTypes = t => (t.GetType() == typeof(ListIdToken) || t.GetType() == typeof(ListUrlToken) || t.GetType() == typeof(ListViewIdToken) || t.GetType() == typeof(ListContentTypeIdToken)); + Predicate listTokenTypes = t => t.GetType() == typeof(ListIdToken) + || t.GetType() == typeof(ListUrlToken) + || t.GetType() == typeof(ListViewIdToken) + || t.GetType() == typeof(ListContentTypeIdToken); + + // Remove tokens from TokenDictionary and ListTokenDictionary foreach (var listToken in _tokens.FindAll(listTokenTypes)) { - foreach (string token in listToken.GetTokens()) + foreach (string token in listToken.GetUnescapedTokens()) { - var tokenKey = Regex.Unescape(token); - TokenDictionary.Remove(tokenKey); + _tokenDictionary.Remove(token); if (listToken is ListIdToken) { - ListTokenDictionary.Remove(tokenKey); + _listTokenDictionary.Remove(token); } } } + _tokens.RemoveAll(listTokenTypes); web.Context.Load(web.Lists, ls => ls.Include(l => l.Id, l => l.Title, l => l.RootFolder.ServerRelativeUrl, l => l.Views, l => l.ContentTypes, l => l.TitleResource)); web.Context.ExecuteQueryRetry(); + foreach (var list in web.Lists) { - _tokens.Add(new ListIdToken(web, list.Title, list.Id)); - // _tokens.Add(new ListIdToken(web, list.Title, Guid.Empty)); + addToken(new ListIdToken(web, list.Title, list.Id)); + var mainLanguageName = GetListTitleForMainLanguage(web, list.Title); if (!string.IsNullOrWhiteSpace(mainLanguageName) && mainLanguageName != 
list.Title) { - _tokens.Add(new ListIdToken(web, mainLanguageName, list.Id)); + addToken(new ListIdToken(web, mainLanguageName, list.Id)); } - _tokens.Add(new ListUrlToken(web, list.Title, list.RootFolder.ServerRelativeUrl.Substring(web.ServerRelativeUrl.TrimEnd(new char[] { '/' }).Length + 1))); + + addToken(new ListUrlToken(web, list.Title, list.RootFolder.ServerRelativeUrl.Substring(web.ServerRelativeUrl.TrimEnd(UrlSeparators).Length + 1))); foreach (var view in list.Views) { - _tokens.Add(new ListViewIdToken(web, list.Title, view.Title, view.Id)); + addToken(new ListViewIdToken(web, list.Title, view.Title, view.Id)); } foreach (var contentType in list.ContentTypes) { - _tokens.Add(new ListContentTypeIdToken(web, list.Title, contentType)); + addToken(new ListContentTypeIdToken(web, list.Title, contentType)); } } - if (web.IsSubSite()) + if (!web.IsSubSite()) { - // Add lists from rootweb - var rootWeb = (web.Context as ClientContext).Site.RootWeb; - rootWeb.EnsureProperty(w => w.ServerRelativeUrl); - rootWeb.Context.Load(rootWeb.Lists, ls => ls.Include(l => l.Id, l => l.Title, l => l.RootFolder.ServerRelativeUrl, l => l.Views)); - rootWeb.Context.ExecuteQueryRetry(); - foreach (var rootList in rootWeb.Lists) + return; + } + + // Add lists from rootweb + var rootWeb = (web.Context as ClientContext).Site.RootWeb; + rootWeb.EnsureProperty(w => w.ServerRelativeUrl); + rootWeb.Context.Load(rootWeb.Lists, ls => ls.Include(l => l.Id, l => l.Title, l => l.RootFolder.ServerRelativeUrl, l => l.Views)); + rootWeb.Context.ExecuteQueryRetry(); + + foreach (var rootList in rootWeb.Lists) + { + // token already there? Skip the list + if (web.Lists.Any(l => l.Title == rootList.Title)) { - // token already there? 
Skip the list - if (web.Lists.FirstOrDefault(l => l.Title == rootList.Title) == null) - { - _tokens.Add(new ListIdToken(web, rootList.Title, rootList.Id)); - _tokens.Add(new ListUrlToken(web, rootList.Title, rootList.RootFolder.ServerRelativeUrl.Substring(rootWeb.ServerRelativeUrl.TrimEnd(new char[] { '/' }).Length + 1))); + continue; + } - foreach (var view in rootList.Views) - { - _tokens.Add(new ListViewIdToken(rootWeb, rootList.Title, view.Title, view.Id)); - } - } + addToken(new ListIdToken(web, rootList.Title, rootList.Id)); + addToken(new ListUrlToken(web, rootList.Title, rootList.RootFolder.ServerRelativeUrl.Substring(rootWeb.ServerRelativeUrl.TrimEnd(UrlSeparators).Length + 1))); + + foreach (var view in rootList.Views) + { + addToken(new ListViewIdToken(rootWeb, rootList.Title, view.Title, view.Id)); } } } @@ -818,18 +898,23 @@ internal void RebuildListTokens(Web web) /// Returns list of token resource values public List> GetResourceTokenResourceValues(string tokenValue) { - List> resourceValues = new List>(); - tokenValue = $"{{{Regex.Escape(tokenValue.Trim(new char[] { '{', '}' }))}}}"; // since LocalizationToken are Regex.Escaped before load - var resourceTokens = _tokens.Where(t => t is LocalizationToken && t.GetTokens().Contains(tokenValue)); - foreach (LocalizationToken resourceToken in resourceTokens) + var resourceValues = new List>(); + tokenValue = $"{{{Regex.Escape(tokenValue.Trim(TokenBoundaryChars))}}}"; // since LocalizationToken are Regex.Escaped before load + + foreach (var token in _tokens.OfType()) { - var entries = resourceToken.ResourceEntries; - foreach (var entry in entries) + if (Array.IndexOf(token.GetTokens(), tokenValue) == -1) + { + continue; + } + + foreach (var entry in token.ResourceEntries) { - CultureInfo ci = new CultureInfo((int)entry.LCID); + var ci = new CultureInfo(entry.LCID); resourceValues.Add(new Tuple(ci.Name, ParseString(entry.Value))); } } + return resourceValues; } @@ -843,7 +928,6 @@ public string 
ParseString(string input) return ParseString(input, null); } - static readonly Regex ReGuid = new Regex("(?\\{\\S{8}-\\S{4}-\\S{4}-\\S{4}-\\S{12}?\\})", RegexOptions.Compiled); /// /// Gets left over tokens /// @@ -851,16 +935,17 @@ public string ParseString(string input) /// Returns collections of left over tokens public IEnumerable GetLeftOverTokens(string input) { - List values = new List(); - var matches = ReGuid.Matches(input).OfType().Select(m => m.Value); - foreach (var match in matches) + var values = new List(); + + foreach (Match match in ReGuid.Matches(input)) { - Guid gout; - if (!Guid.TryParse(match, out gout)) + string value = match.Value; + if (!Guid.TryParse(value, out _)) { - values.Add(match); + values.Add(value); } } + return values; } @@ -875,113 +960,112 @@ public string ParseStringWebPart(string input, Web web, params string[] tokensTo { web.EnsureProperty(x => x.Id); - var tokenChars = new[] { '{', '~' }; - if (string.IsNullOrEmpty(input) || input.IndexOfAny(tokenChars) == -1) return input; - - BuildTokenCache(); + if (string.IsNullOrEmpty(input) || input.IndexOfAny(TokenChars) == -1) + { + return input; + } // Optimize for direct match with string search - if (TokenDictionary.TryGetValue(input, out string directMatch)) + if (_tokenDictionary.TryGetValue(input, out string directMatch)) { return directMatch; } // Support for non cached tokens - var nonCachedTokens = BuildNonCachedTokenCache(); - if (nonCachedTokens.TryGetValue(input, out string directMatchNonCached)) + if (TryGetNonCacheableToken(input, out string directMatchNonCached)) { return directMatchNonCached; } string output = input; bool hasMatch = false; + do { hasMatch = false; output = ReToken.Replace(output, match => { string tokenString = match.Groups[0].Value; - if (TokenDictionary.TryGetValue(tokenString, out string val)) + + if (!_tokenDictionary.TryGetValue(tokenString, out string val)) { - if (tokenString.IndexOf("listid", StringComparison.OrdinalIgnoreCase) != -1) + return 
tokenString; + } + + if (tokenString.IndexOf("listid", StringComparison.OrdinalIgnoreCase) != -1) + { + var token = _listTokenDictionary[tokenString]; + if (!token.Web.Id.Equals(web.Id)) { - var token = ListTokenDictionary[tokenString]; - if (!token.Web.Id.Equals(web.Id)) - { - return tokenString; - } + return tokenString; } - hasMatch = true; - return val; } - return match.Groups[0].Value; + + hasMatch = true; + return val; }); } while (hasMatch && input != output); return output; } - private void BuildTokenCache() + private void BuildTokenCache(int cacheableCount, int nonCacheableCount) { - foreach (TokenDefinition tokenDefinition in _tokens.Where(t => t.IsCacheable)) - { - foreach (string token in tokenDefinition.GetTokens()) - { - var tokenKey = Regex.Unescape(token); - if (TokenDictionary.ContainsKey(tokenKey)) continue; - - int before = _web.Context.PendingRequestCount(); - string value = tokenDefinition.GetReplaceValue(); - int after = _web.Context.PendingRequestCount(); + _tokenDictionary = new Dictionary(capacity: cacheableCount > 0 ? cacheableCount + 1 : 0, StringComparer.OrdinalIgnoreCase); + _nonCacheableTokenDictionary = new Dictionary(capacity: nonCacheableCount > 0 ? nonCacheableCount + 1 : 0, StringComparer.OrdinalIgnoreCase); + _listTokenDictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); - if (before != after) - { - throw new Exception($"Token {token} triggered an ExecuteQuery on the 'current' context. 
Please refactor this token to use the TokenContext class."); - } - - TokenDictionary[tokenKey] = value; - if (tokenDefinition is ListIdToken) - { - ListTokenDictionary[tokenKey] = tokenDefinition; - } - } + for (var index = 0; index < _tokens.Count; index++) + { + AddToTokenCache(_tokens[index]); } } - private Dictionary BuildNonCachedTokenCache() + private void AddToTokenCache(TokenDefinition definition) { - Dictionary nonCachedTokenDictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); - - foreach (TokenDefinition tokenDefinition in _tokens.Where(t => !t.IsCacheable)) + IReadOnlyList tokens = definition.GetUnescapedTokens(); + for (var index = 0; index < tokens.Count; index++) { - foreach (string token in tokenDefinition.GetTokens()) + string token = tokens[index]; + + if (!definition.IsCacheable) { - var tokenKey = Regex.Unescape(token); - if (nonCachedTokenDictionary.ContainsKey(tokenKey)) continue; + _nonCacheableTokenDictionary[token] = definition; + continue; + } - int before = _web.Context.PendingRequestCount(); - string value = tokenDefinition.GetReplaceValue(); - int after = _web.Context.PendingRequestCount(); + string value = GetTokenReplacementValue(definition, token, _web.Context); + _tokenDictionary[token] = value; - if (before != after) - { - throw new Exception($"Token {token} triggered an ExecuteQuery on the 'current' context. 
Please refactor this token to use the TokenContext class."); - } - - nonCachedTokenDictionary[tokenKey] = value; + if (definition is ListIdToken) + { + _listTokenDictionary[token] = definition; } } + } - return nonCachedTokenDictionary; + private bool TryGetNonCacheableToken(string input, out string value) + { + if (_nonCacheableTokenDictionary.TryGetValue(input, out TokenDefinition definition)) + { + value = GetTokenReplacementValue(definition, input, _web.Context); + return true; + } + + value = null; + return false; } - // First group supports tokens in form '{param:value}' , second group supports nested parameters in form '{param:{xxx..' - private static readonly Regex ReToken = new Regex(@"(?:(\{(?:\1??[^{]*?\})))|(?:(\{(?:\1??[^{]*?:)))", RegexOptions.Compiled | RegexOptions.IgnoreCase); - private static readonly Regex ReTokenFallback = new Regex(@"\{.*?\}", RegexOptions.Compiled); + private static string GetTokenReplacementValue(TokenDefinition definition, string tokenName, ClientRuntimeContext context) + { + int before = context.PendingRequestCount(); + string result = definition.GetReplaceValue(); + int after = context.PendingRequestCount(); - private static readonly char[] TokenChars = { '{', '~' }; - private readonly Dictionary TokenDictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); - private readonly Dictionary ListTokenDictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); + return before == after + ? result + : throw new Exception($"Token {tokenName} triggered an ExecuteQuery on the 'current' context. 
Please refactor this token to use the TokenContext class."); + } /// /// Parses given string @@ -991,21 +1075,19 @@ private Dictionary BuildNonCachedTokenCache() /// Returns parsed string public string ParseString(string input, params string[] tokensToSkip) { - if (string.IsNullOrWhiteSpace(input)) return input; - - if (string.IsNullOrEmpty(input) || input.IndexOfAny(TokenChars) == -1) return input; - - BuildTokenCache(); + if (string.IsNullOrWhiteSpace(input) || input.IndexOfAny(TokenChars) == -1) + { + return input; + } // Optimize for direct match with string search - if (TokenDictionary.TryGetValue(input, out string directMatch)) + if (_tokenDictionary.TryGetValue(input, out string directMatch)) { return directMatch; } // Support for non cached tokens - var nonCachedTokens = BuildNonCachedTokenCache(); - if (nonCachedTokens.TryGetValue(input, out string directMatchNonCached)) + if (TryGetNonCacheableToken(input, out string directMatchNonCached)) { return directMatchNonCached; } @@ -1019,38 +1101,58 @@ public string ParseString(string input, params string[] tokensToSkip) output = ReToken.Replace(output, match => { string tokenString = match.Groups[0].Value; - if (TokenDictionary.TryGetValue(tokenString, out string val)) + + if (!_tokenDictionary.TryGetValue(tokenString, out string val)) { - hasMatch = true; - return val; + return tokenString; } - return match.Groups[0].Value; + + hasMatch = true; + return val; }); } while (hasMatch && input != output); - if (hasMatch) return output; + if (hasMatch) + { + return output; + } var fallbackMatches = ReTokenFallback.Matches(output); - if (fallbackMatches.Count == 0) return output; + if (fallbackMatches.Count == 0) + { + return output; + } // If all token constructs {...} are GUID's, we can skip the expensive fallback bool needFallback = false; foreach (Match match in fallbackMatches) { - if (!ReGuid.IsMatch(match.Value)) needFallback = true; + if (!ReGuid.IsMatch(match.Value)) + { + needFallback = true; + } + } + + if 
(!needFallback) + { + return output; } - if (!needFallback) return output; // Fallback for tokens which may contain { or } as part of their name - foreach (var pair in TokenDictionary) + foreach (var pair in _tokenDictionary) { int idx = output.IndexOf(pair.Key, StringComparison.CurrentCultureIgnoreCase); if (idx != -1) { output = output.Remove(idx, pair.Key.Length).Insert(idx, pair.Value); } - if (!ReTokenFallback.IsMatch(output)) break; + + if (!ReTokenFallback.IsMatch(output)) + { + break; + } } + return output; } @@ -1127,78 +1229,91 @@ internal void RemoveToken(T oldToken) where T : TokenDefinition for (int i = 0; i < _tokens.Count; i++) { var tokenDefinition = _tokens[i]; - if (tokenDefinition.GetTokens().SequenceEqual(oldToken.GetTokens())) + if (!tokenDefinition.GetTokens().SequenceEqual(oldToken.GetTokens())) { - _tokens.RemoveAt(i); + continue; + } - foreach (string token in tokenDefinition.GetTokens()) - { - var tokenKey = Regex.Unescape(token); - TokenDictionary.Remove(tokenKey); - } + _tokens.RemoveAt(i); - break; + foreach (string token in tokenDefinition.GetUnescapedTokens()) + { + _tokenDictionary.Remove(token); + _nonCacheableTokenDictionary.Remove(token); } + + break; } } - private static readonly Dictionary listsTitles = new Dictionary(StringComparer.OrdinalIgnoreCase); - /// /// This method retrieves the title of a list in the main language of the site /// /// The current Web /// The title of the list in the current user's language /// The title of the list in the main language of the site - private static string GetListTitleForMainLanguage(Web web, String name) + private string GetListTitleForMainLanguage(Web web, string name) { - if (listsTitles.ContainsKey(name)) + if (_listsTitles.TryGetValue(name, out string title)) { - // Return the title that we already have - return (listsTitles[name]); + return title; } - else - { - // Get the default culture for the current web - var ci = new System.Globalization.CultureInfo((int)web.Language); - // 
Refresh the list of lists with a lock - lock (typeof(ListIdToken)) + // Get the default culture for the current web + var ci = new CultureInfo((int)web.Language); + + // Reset the cache of lists titles + _listsTitles.Clear(); + + const int batchSize = 20; + int listCount = web.Lists.Count; + var titles = new Tuple>[batchSize]; + + // Add the new lists title using the main language of the site + for (var listIndex = 0; listIndex < listCount;) + { + for (int step = 0; step < batchSize; step++) { - // Reset the cache of lists titles - TokenParser.listsTitles.Clear(); + var list = web.Lists[listIndex++]; + titles[step] = Tuple.Create(list.Title, list.TitleResource.GetValueForUICulture(ci.Name)); - // Add the new lists title using the main language of the site - foreach (var list in web.Lists) + if (listIndex == listCount) { - var titleResource = list.TitleResource.GetValueForUICulture(ci.Name); - web.Context.ExecuteQueryRetry(); - if (!TokenParser.listsTitles.ContainsKey(list.Title)) - { - TokenParser.listsTitles.Add(list.Title, titleResource.Value); - } + break; } } - // If now we have the list title ... - if (listsTitles.ContainsKey(name)) - { - // Return the title, if any - return (listsTitles[name]); - } - else + web.Context.ExecuteQueryRetry(); + + for (var index = 0; index < titles.Length; index++) { - return (null); + Tuple> tuple = titles[index]; + if (tuple == null) + { + break; + } + + if (!_listsTitles.ContainsKey(tuple.Item1)) + { + _listsTitles.Add(tuple.Item1, tuple.Item2.Value); + } } + + Array.Clear(titles, 0, titles.Length); } + + // If now we have the list title ... + return _listsTitles.TryGetValue(name, out title) + ? 
title // Return the title, if any + : null; } private static List ParseTemplate(ProvisioningTemplate template) { - List tokenIds = new List(); + var tokenIds = new List(); // Add parameter tokenid if parameters are specified - if (template.Parameters != null && template.Parameters.Any()) + if (template.Parameters != null && template.Parameters.Count > 0) { tokenIds.Add("parameter"); } @@ -1207,8 +1322,9 @@ private static List ParseTemplate(ProvisioningTemplate template) if (xml.IndexOfAny(TokenChars) == -1) return tokenIds; - bool hasMatch = false; - string tempXml = xml; + bool hasMatch; + string tempXml; + do { hasMatch = false; @@ -1216,40 +1332,84 @@ private static List ParseTemplate(ProvisioningTemplate template) { for (int i = 0; i < match.Groups.Count; i++) { - if (!ReGuid.IsMatch(match.Groups[i].Value)) + if (ReGuid.IsMatch(match.Groups[i].Value)) { - var originalTokenString = match.Groups[i].Value.Replace("{", "").Replace("}", "").ToLower(); + continue; + } - var tokenStringToAdd = originalTokenString; - var colonIndex = tokenStringToAdd.IndexOf(":"); - if (colonIndex > -1) - { - tokenStringToAdd = tokenStringToAdd.Substring(0, colonIndex); - } - if (!tokenIds.Contains(tokenStringToAdd) && !string.IsNullOrEmpty(tokenStringToAdd)) - { - tokenIds.Add(tokenStringToAdd); - } + var originalTokenString = match.Groups[i].Value.Replace("{", "").Replace("}", "").ToLower(); + + var tokenStringToAdd = originalTokenString; + var colonIndex = tokenStringToAdd.IndexOf(":"); + if (colonIndex > -1) + { + tokenStringToAdd = tokenStringToAdd.Substring(0, colonIndex); + } + if (!tokenIds.Contains(tokenStringToAdd) && !string.IsNullOrEmpty(tokenStringToAdd)) + { + tokenIds.Add(tokenStringToAdd); + } - // If sequencesitetoken is used we need to make sure that the corresponding site token is also loaded - if (tokenStringToAdd == "sequencesitetoken") + // If sequencesitetoken is used we need to make sure that the corresponding site token is also loaded + if 
(string.Equals(tokenStringToAdd, "sequencesitetoken", StringComparison.OrdinalIgnoreCase)) + { + var sequenceSiteTokenArray = originalTokenString.Split(InternalTokenDelimiters); + if (sequenceSiteTokenArray.Length > 2 && !string.IsNullOrWhiteSpace(sequenceSiteTokenArray[2]) && !tokenIds.Contains(sequenceSiteTokenArray[2])) { - var sequenceSiteTokenArray = originalTokenString.Split(':'); - if (sequenceSiteTokenArray.Length > 2 && !string.IsNullOrWhiteSpace(sequenceSiteTokenArray[2]) && !tokenIds.Contains(sequenceSiteTokenArray[2])) - { - tokenIds.Add(sequenceSiteTokenArray[2]); - } + tokenIds.Add(sequenceSiteTokenArray[2]); } } } return "-"; - }); } while (hasMatch && xml != tempXml); + return tokenIds; } + private static void AddResourceEntry(string key, int lcid, string value, Dictionary> dictionary) + { + if (!dictionary.TryGetValue(key, out List entries)) + { + entries = new List(); + dictionary.Add(key, entries); + } + + entries.Add(new ResourceEntry { LCID = lcid, Value = value }); + } + + private static void CalculateTokenCount(IReadOnlyList tokens, out int cacheableCount, out int nonCacheableCount) + { + cacheableCount = 0; + nonCacheableCount = 0; + + for (var index = 0; index < tokens.Count; index++) + { + TokenDefinition definition = tokens[index]; + + if (definition.IsCacheable) + { + cacheableCount += definition.TokenCount; + } + else + { + nonCacheableCount += definition.TokenCount; + } + } + } + + private void AddTokenWithCacheUpdate(TokenDefinition tokenDefinition) + { + _tokens.Add(tokenDefinition); + AddToTokenCache(tokenDefinition); + } + + private void AddTokenToList(TokenDefinition tokenDefinition) + { + _tokens.Add(tokenDefinition); + } + /// /// Clones the current TokenParser instance into a new instance /// @@ -1260,4 +1420,3 @@ public object Clone() } } } - From 9166e5c2792ac68281f09009a491f69b77154b6e Mon Sep 17 00:00:00 2001 From: fzbm <52308785+fzbm@users.noreply.github.com> Date: Tue, 16 Jan 2024 11:43:12 +0100 Subject: [PATCH 6/8] 
Updated "System.IdentityModel.Tokens.Jwt" Updated the "System.IdentityModel.Tokens.Jwt" NuGet package to 6.34 to address NU1605 (package downgrade). --- src/lib/PnP.Framework/PnP.Framework.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/PnP.Framework/PnP.Framework.csproj b/src/lib/PnP.Framework/PnP.Framework.csproj index b0c039968..ccdcae3b3 100644 --- a/src/lib/PnP.Framework/PnP.Framework.csproj +++ b/src/lib/PnP.Framework/PnP.Framework.csproj @@ -259,7 +259,7 @@ - + From 734abeb4342ac6094c48f37d2b5fa91d3d88174d Mon Sep 17 00:00:00 2001 From: fzbm <52308785+fzbm@users.noreply.github.com> Date: Tue, 16 Jan 2024 11:49:39 +0100 Subject: [PATCH 7/8] Updated "System.IdentityModel.Tokens.Jwt" Updated the "System.IdentityModel.Tokens.Jwt" NuGet package to 6.34 to address NU1605 (package downgrade). --- .../PnP.Framework.Modernization.Test.csproj | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lib/PnP.Framework.Modernization.Test/PnP.Framework.Modernization.Test.csproj b/src/lib/PnP.Framework.Modernization.Test/PnP.Framework.Modernization.Test.csproj index 2d99efb55..ce80ba61c 100644 --- a/src/lib/PnP.Framework.Modernization.Test/PnP.Framework.Modernization.Test.csproj +++ b/src/lib/PnP.Framework.Modernization.Test/PnP.Framework.Modernization.Test.csproj @@ -15,6 +15,7 @@ + From cd0b17103bb7a83fe0de4254445b6f7364de4e94 Mon Sep 17 00:00:00 2001 From: fzbm <52308785+fzbm@users.noreply.github.com> Date: Mon, 5 Feb 2024 06:32:52 +0100 Subject: [PATCH 8/8] Updated "System.IdentityModel.Tokens.Jwt" Updated the "System.IdentityModel.Tokens.Jwt" NuGet package to 6.35 to address NU1605 (package downgrade) for the "PnP.Framework.Modernization .Test" project. 
--- .../PnP.Framework.Modernization.Test.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/PnP.Framework.Modernization.Test/PnP.Framework.Modernization.Test.csproj b/src/lib/PnP.Framework.Modernization.Test/PnP.Framework.Modernization.Test.csproj index ce80ba61c..6f9845d58 100644 --- a/src/lib/PnP.Framework.Modernization.Test/PnP.Framework.Modernization.Test.csproj +++ b/src/lib/PnP.Framework.Modernization.Test/PnP.Framework.Modernization.Test.csproj @@ -15,7 +15,7 @@ - +