I've been writing a program to perform a kind of pattern-matching in XML and text files.
When my program reaches this section of the code, CPU usage gets very high and performance degrades to the point where the program appears to be frozen. It actually isn't frozen, but depending on the input (the number of text files and their content), it may take several hours to complete the task.
I'm looking for a more efficient way to rewrite this section of the code:
List<string> CandidatesRet = new List<string>();
for (int indexCi = 0; indexCi < Ci.Count - 1; indexCi++)
{
// generate all sub itemset with length-1
string[] allItems = Ci[indexCi].Split(new char[] { ' ' });
for (int i = 0; i < allItems.Length; i++)
{
string tempStr = "";
for (int j = 0; j < allItems.Length; j++)
if (i != j)
tempStr += allItems[j] + " ";
tempStr = tempStr.Trim();
subItemset.Add(tempStr);
}
// THE PROBLEM BEGINS HERE
foreach (string subitem in subItemset)
{
int iFirtS;
for (int indexCommon = indexCi + 1; indexCommon < Ci.Count; indexCommon++)
if ((iFirtS = Ci[indexCommon].IndexOf(subitem)) >= 0)
{
string[] listTempCi = Ci[indexCommon].Split(new char[] { ' ' });
foreach (string itemCi in listTempCi)
if (!subitem.Contains(itemCi))
commonItem.Add(itemCi);
}
allCommonItems.Add(commonItem);
}
// generate condidate from common item
foreach (string item in oldItemsetCi)
{
bool flagCi = true;
foreach (List<string> listCommItem in allCommonItems)
if (!listCommItem.Contains(item))
{
flagCi = false;
break;
}
if (flagCi)
CandidatesRet.Add((Ci[indexCi] + " " + item).Trim());
}
There are many nested loops, and I suspect they are the cause of the slowdown. What do you think should be improved?