I wrote my own maximum-matching function in Swift to segment Chinese sentences into words. It works fine, except that with abnormally long sentences the memory usage climbs to over 1 GB. I need help figuring out how to modify my code so that this memory problem goes away. I'm not sure whether it is caused by how I'm using RealmSwift or by my algorithm in general.
Here is my code:
import RealmSwift

func splitSentenceIntoWordsWithDictionaryMaximumMatching(string: String) -> [String] {
    var string = string
    var foundWordsArray: [String] = []
    var position = count(string)
    while position > 0
    {
        // Take the prefix of the remaining sentence up to `position` characters.
        var index = advance(string.startIndex, position)
        let partialString = string.substringToIndex(index)
        if let found = Realm().objects(Word).filter("simplified == '\(partialString)'").first
        {
            // Longest prefix found in the dictionary: keep it as a word
            // and drop it from the front of the sentence.
            foundWordsArray.append(partialString)
            position = position - 1
            var partialStringCount = count(partialString)
            while partialStringCount > 0
            {
                string = dropFirst(string)
                partialStringCount -= 1
            }
            position = count(string)
            index = advance(string.startIndex, position)
        }
        else if count(partialString) == 1
        {
            // Single character with no dictionary entry: add it to the dictionary
            // and treat it as a word on its own.
            addNewEntryToDictionaryInTransaction(partialString, "", [partialString], partialString)
            foundWordsArray.append(partialString)
            var partialStringCount = count(partialString)
            while partialStringCount > 0
            {
                string = dropFirst(string)
                partialStringCount -= 1
            }
            position = count(string)
            index = advance(string.startIndex, position)
        }
        else
        {
            // No match: shorten the candidate prefix by one character and try again.
            position = position - 1
            index = advance(string.startIndex, position)
        }
    }
    return foundWordsArray
}
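
For context, here is roughly how I call it, along with a stripped-down sketch of the Word model (reduced to the one property the filter uses). The sample sentence is just a placeholder; the real input that triggers the memory growth is much longer.

import RealmSwift

// Stripped-down sketch of the model the filter queries
// (only the property used for the lookup is shown).
class Word: Object {
    dynamic var simplified = ""
}

// Placeholder sentence; the memory problem only shows up with abnormally long input.
let sentence = "我昨天去图书馆借了一本书"
let words = splitSentenceIntoWordsWithDictionaryMaximumMatching(sentence)
println(words)  // segmentation depends on what is in the Realm dictionary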