I've tried many approaches for parsing a file's contents line by line, but at the moment it is not working, and when it runs it uses a lot of memory (more than 16 GB).
This is a subset of the file I want to parse: http://lpaste.net/144719
I want to distinguish three kinds of errors:
1) an error with a backtrace (multiple lines; the first of them looks like 3))
2) a single error with one extra line
3) a single-line error
Here is my current code:
import qualified Data.ByteString as B
import Data.ByteString.Char8 as B8 hiding (lines, filter, unlines, head, readFile, take, length,
                                           putStrLn, tail, map, concat, or, writeFile, intersperse,
                                           groupBy, hGetContents)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Data.Attoparsec.Text hiding (take)
import Control.Applicative
import Control.Monad (replicateM, mapM)
import Data.Either (either)
import Data.List (intersperse, groupBy)
import System.Environment
import qualified System.IO as SIO
data TimeStamp = MkTimeStamp T.Text
  deriving Show
data LogFileInfo = BackTraceLineInfo T.Text
                 | BackTraceInfo TimeStamp T.Text T.Text [LogFileInfo]
                 | Error TimeStamp T.Text
                 | LargeError TimeStamp T.Text T.Text
                 deriving Show
data LineType = SingleLineError TimeStamp T.Text
              | DirectoryInfo T.Text
              | ErrorInfo T.Text
              | LineBackTraceInfo T.Text
              | BackTraceString T.Text
              | BackTraceLine T.Text
              deriving Show
parseTimeStamp :: Parser TimeStamp
parseTimeStamp = do
  year <- many digit
  char '-'
  month <- many digit
  char '-'
  day <- many digit
  char ' '
  hour <- many digit
  char ':'
  minute <- many digit
  char ':'
  second <- many digit
  char ' '
  (return . MkTimeStamp) $ T.pack $ year ++ "-" ++ month ++ "-" ++ day ++ " " ++ hour ++ ":" ++ minute ++ ":" ++ second
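-- Aside, a sketch of a roughly equivalent but more compact parser (assuming
-- the stamp is always "YYYY-MM-DD HH:MM:SS"): takeWhile1 hands back the
-- matched input as Text directly, so no intermediate [Char] has to be
-- rebuilt with T.pack.
parseTimeStamp' :: Parser TimeStamp
parseTimeStamp' = do
  date <- takeWhile1 (inClass "0-9-")   -- the "YYYY-MM-DD" part
  char ' '
  time <- takeWhile1 (inClass "0-9:")   -- the "HH:MM:SS" part
  char ' '
  return $ MkTimeStamp $ T.concat [date, T.pack " ", time]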
parseError :: Parser LineType
parseError = do
  string $ T.pack "ERROR - "
  timeStamp <- parseTimeStamp
  errorInfo <- parseAnyLine
  return $ SingleLineError timeStamp errorInfo
parseDirectoryInfo :: Parser LineType
parseDirectoryInfo = do
  char '/'
  directoryInfo <- parseAnyLine
  (return . DirectoryInfo) $ T.append (T.pack "/") directoryInfo
parseErrorInfo :: Parser LineType
parseErrorInfo = do
  errorInfo <- parseAnyLine
  (return . ErrorInfo) errorInfo
parseBackTraceString :: Parser LineType
parseBackTraceString = do
  let backTraceStr = T.pack " Backtrace: "
  string backTraceStr
  return $ BackTraceString backTraceStr
parseBacktraceLine :: Parser LineType
parseBacktraceLine = do
  char '#'
  number <- many1 digit
  backTraceInfo <- parseAnyLine
  let numberPart = T.pack $ '#' : number
  return $ LineBackTraceInfo $ T.append numberPart backTraceInfo
parseAnyLine :: Parser T.Text
parseAnyLine = fmap T.pack $ many anyChar
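-- Note: 'many anyChar' accumulates a [Char] one character at a time;
-- attoparsec's takeText consumes the remaining input and returns it as Text
-- in one step, so an equivalent and cheaper definition would be:
--   parseAnyLine = takeText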
-- Skips n lines for allowing other parsers to succeed
skipNLines n = replicateM n $ manyTill anyChar endOfLine
-- performParser :: Parser a -> T.Text -> BackTraceInfo
performParser = parseOnly
getEitherRight :: Either a b -> b
getEitherRight (Right b) = b
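-- Note: getEitherRight is partial; if the result is a Left it fails with a
-- pattern-match error.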
parseLogFile :: [T.Text] -> [LineType]
parseLogFile textxs =
  let listaEithers = mapM (parseOnly $
                               try parseError
                           <|> try parseDirectoryInfo
                           <|> try parseBacktraceLine
                           <|> try parseBackTraceString
                           <|> parseErrorInfo) textxs
  in getEitherRight listaEithers
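-- Note: because mapM runs in the Either monad, a single line that fails to
-- parse turns the whole result into a Left, which getEitherRight then cannot
-- handle. A sketch that keeps only the successful parses instead (rights
-- would come from Data.Either):
--   parseLogFile' :: [T.Text] -> [LineType]
--   parseLogFile' = rights . map (parseOnly lineParser)
--     where lineParser = try parseError
--                    <|> try parseDirectoryInfo
--                    <|> try parseBacktraceLine
--                    <|> try parseBackTraceString
--                    <|> parseErrorInfo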
customUnlines :: [String] -> String
customUnlines [] = []
customUnlines (x:xs) = if x == "\n"
                         then '\n':customUnlines xs
                         else x ++ "\n" ++ customUnlines xs
main = do
  (fileName : _) <- getArgs
  h <- SIO.openFile fileName SIO.ReadMode
  SIO.hSetEncoding h SIO.latin1
  fileContents <- SIO.hGetContents h
  let titleLength = length fileName
      titleWithoutExtension = take (titleLength - 4) fileName
      allNonEmptyLines = map T.pack $ intersperse "\n" $ tail $ filter (/= "") $ lines fileContents -- [T.Text]
      listParseResults = parseLogFile allNonEmptyLines -- [LineType]
      -- onlyModelErrors = filter isModelError parseResult -- [LogFileInfo]
      -- onlyOneRepresentative = map head $ groupBy equalErrors onlyModelErrors
      listOfStrings = map show listParseResults
  writeFile (titleWithoutExtension ++ ".logsummary") $ customUnlines listOfStrings
The first problem is that the parser isn't parsing anything, and the second problem is that it uses more than 16 GB of RAM. How can I improve my approach?
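For reference, one direction I have been considering is to classify each line as it is read and write the result out immediately, so none of the intermediate lists are kept in memory. A rough, untested sketch (summarizeByLine and lineParser are placeholder names; it reuses the line parsers above and assumes that lines which fail to parse can simply be skipped):

summarizeByLine :: FilePath -> FilePath -> IO ()
summarizeByLine inFile outFile = do
  hIn <- SIO.openFile inFile SIO.ReadMode
  SIO.hSetEncoding hIn SIO.latin1
  hOut <- SIO.openFile outFile SIO.WriteMode
  let lineParser = try parseError
               <|> try parseDirectoryInfo
               <|> try parseBacktraceLine
               <|> try parseBackTraceString
               <|> parseErrorInfo
      loop = do
        eof <- SIO.hIsEOF hIn
        if eof
          then return ()
          else do
            line <- TIO.hGetLine hIn
            case parseOnly lineParser line of
              Left _         -> return ()                          -- line did not parse; skip it
              Right lineType -> SIO.hPutStrLn hOut (show lineType) -- write the result right away
            loop
  loop
  SIO.hClose hIn
  SIO.hClose hOut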