From 077a28b637ff3e6598418cacecd614109856f40f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bart=C5=82omiej=20Przemys=C5=82aw=20Pluta?=
Date: Mon, 8 Nov 2021 11:26:24 +0100
Subject: [PATCH] Polish imports

---
 app/Assembler/Parser.hs    | 20 +++++++++--------
 app/Assembler/Tokenizer.hs | 46 ++++++++++++++++++++------------------
 app/Util.hs                |  9 ++++----
 app/VirtualMachine.hs      | 22 +++++++++---------
 4 files changed, 52 insertions(+), 45 deletions(-)

diff --git a/app/Assembler/Parser.hs b/app/Assembler/Parser.hs
index 63c7456..4ce4d55 100644
--- a/app/Assembler/Parser.hs
+++ b/app/Assembler/Parser.hs
@@ -1,13 +1,15 @@
 module Assembler.Parser where
 
-import qualified Data.List as List
-import qualified Data.Monoid as Monoid
-import qualified VirtualMachine as VM
-import qualified Assembler.Tokenizer as T
-import qualified Util as U
+import Data.List (intercalate)
+import Data.Monoid (First(..))
+
+import qualified Assembler.Tokenizer as T (Token(..))
+import VirtualMachine (Op)
+import Util (explode)
+
 
 data AST = Empty
-  | Operator VM.Op
+  | Operator Op
   | Integer Int
   | Identifier String
   | Colon
@@ -115,7 +117,7 @@ parseAlt parsers mapper tokens = do
 -- a | b | c
 parseAny :: [Parser] -> Parser
 parseAny _ [] = Nothing
-parseAny parsers tokens = Monoid.getFirst . Monoid.mconcat . map Monoid.First $ sequenceA parsers tokens
+parseAny parsers tokens = getFirst . mconcat . map First $ sequenceA parsers tokens
 
 -- a b c
 parseSeq :: [Parser] -> ([AST] -> AST) -> Parser
@@ -147,10 +149,10 @@ assertConsumed parser tokens = do
 
 parse :: [T.Token] -> Either String AST
 parse tokens = do
-  let codeLines = U.explode (==T.NewLine) tokens
+  let codeLines = explode (==T.NewLine) tokens
   let results = map (assertConsumed parseLine) codeLines
   let errors = filter ((==Nothing) . snd) $ zipWith (,) codeLines $ results
-  let errorMsg = "Parse error(s):\n" ++ (List.intercalate "\n" $ map (show . fst) errors)
+  let errorMsg = "Parse error(s):\n" ++ (intercalate "\n" $ map (show . fst) errors)
   case sequenceA results of
     (Just r) -> return $ Program $ map (\(ParseResult ast _) -> ast) r
     Nothing -> Left errorMsg
\ No newline at end of file
diff --git a/app/Assembler/Tokenizer.hs b/app/Assembler/Tokenizer.hs
index 8612a40..5bf306f 100644
--- a/app/Assembler/Tokenizer.hs
+++ b/app/Assembler/Tokenizer.hs
@@ -1,12 +1,14 @@
 module Assembler.Tokenizer where
 
-import qualified Data.List as List
-import qualified Data.Char as Char
-import qualified Data.Monoid as Monoid
-import qualified VirtualMachine as VM
-import qualified Util as U
+import Data.List (sortBy)
+import Data.Char (ord, isDigit, isSpace, isAlpha, isAlphaNum, isHexDigit)
+import Data.Monoid (First(..))
 
-data Token = Operator VM.Op
+import VirtualMachine (Op(..))
+import Util (toLowerCase, controlChar, unescape)
+
+
+data Token = Operator Op
   | IntLiteral Int
   | StringLiteral String
   | Identifier String
@@ -30,31 +32,31 @@ keywordTokenizer cs kwd token input
   | otherwise = Nothing
   where
     len = length kwd
-    mapper = if cs then id else U.toLowerCase
+    mapper = if cs then id else toLowerCase
     zipped = zipWith (==) (mapper kwd) (mapper . take len $ input)
     matches = and zipped && len == length zipped
 
-operatorTokenizer :: VM.Op -> Tokenizer
-operatorTokenizer op input = keywordTokenizer False (U.toLowerCase . show $ op) (Operator op) input
+operatorTokenizer :: Op -> Tokenizer
+operatorTokenizer op input = keywordTokenizer False (toLowerCase . show $ op) (Operator op) input
 
 tokenizeOperators :: Tokenizer
 tokenizeOperators = anyTokenizer $ map operatorTokenizer ops
   where
-    ops = List.sortBy cmp [VM.Nop ..]
+    ops = sortBy cmp [Nop ..]
     cmp x y = (length . show) y `compare` (length . show) x
 
 tokenizeIdentifier :: Tokenizer
 tokenizeIdentifier [] = Nothing
-tokenizeIdentifier input@(x:_) = if null identifier || (not . Char.isAlpha) x
+tokenizeIdentifier input@(x:_) = if null identifier || (not . isAlpha) x
   then Nothing
   else Just $ TokenizeResult (Identifier identifier) (length identifier)
   where
-    identifier = takeWhile (or . sequenceA [Char.isAlphaNum, (=='_')]) input
+    identifier = takeWhile (or . sequenceA [isAlphaNum, (=='_')]) input
 
 tokenizeWhitespace :: Tokenizer
 tokenizeWhitespace [] = Nothing
 tokenizeWhitespace (x:_)
-  | Char.isSpace x = Just $ TokenizeResult WhiteSpace 1
+  | isSpace x = Just $ TokenizeResult WhiteSpace 1
   | otherwise = Nothing
 
 tokenizeDecimal :: Tokenizer
@@ -65,7 +67,7 @@ tokenizeDecimal input = if null numberStr
   where
     number = read numberStr
     len = length numberStr
-    numberStr = takeWhile Char.isDigit input
+    numberStr = takeWhile isDigit input
 
 tokenizeHex :: Tokenizer
 tokenizeHex [] = Nothing
@@ -76,17 +78,17 @@ tokenizeHex input = if isPrefix && len > 0
     isPrefix = take 2 input == "0x"
     number = read . ("0x"++) $ numberStr
     len = length numberStr
-    numberStr = takeWhile Char.isHexDigit (drop 2 input)
+    numberStr = takeWhile isHexDigit (drop 2 input)
 
 tokenizeChar :: Tokenizer
-tokenizeChar ('\'':'\\':x:'\'':_) = U.controlChar x >>= (\s -> return $ TokenizeResult (IntLiteral s) 4)
-tokenizeChar ('\'':x:'\'':_) = Just $ TokenizeResult (IntLiteral . Char.ord $ x) 3
+tokenizeChar ('\'':'\\':x:'\'':_) = controlChar x >>= (\s -> return $ TokenizeResult (IntLiteral s) 4)
+tokenizeChar ('\'':x:'\'':_) = Just $ TokenizeResult (IntLiteral . ord $ x) 3
 tokenizeChar _ = Nothing
 
 tokenizeString :: Tokenizer
 tokenizeString ('"':xs) = do
   string <- extractString xs
-  unescaped <- U.unescape string
+  unescaped <- unescape string
   return $ TokenizeResult (StringLiteral unescaped) (length string + 2)
   where
     extractString [] = Nothing
@@ -117,7 +119,7 @@ sepTokenizer predicate tokenizer input = do
 
 anyTokenizer :: [Tokenizer] -> Tokenizer
 anyTokenizer _ [] = Nothing
-anyTokenizer tokenizers input = Monoid.getFirst . Monoid.mconcat . map Monoid.First $ sequenceA tokenizers input
+anyTokenizer tokenizers input = getFirst . mconcat . map First $ sequenceA tokenizers input
 
 tokenize :: String -> Either String [Token]
 tokenize [] = Right []
@@ -130,9 +132,9 @@ tokenize input = tokens >>= (\t -> Right $ filter tokenFilter t)
       [ keywordTokenizer False "\n" NewLine
      , tokenizeWhitespace
      , tokenizeComment
-     , sepTokenizer Char.isSpace tokenizeOperators
-     , sepTokenizer Char.isSpace tokenizeHex
-     , sepTokenizer Char.isSpace tokenizeDecimal
+     , sepTokenizer isSpace tokenizeOperators
+     , sepTokenizer isSpace tokenizeHex
+     , sepTokenizer isSpace tokenizeDecimal
      , tokenizeIdentifier
      , keywordTokenizer False ":" Colon
      , keywordTokenizer False "&" Ampersand
diff --git a/app/Util.hs b/app/Util.hs
index ea8feb1..6e8447f 100644
--- a/app/Util.hs
+++ b/app/Util.hs
@@ -10,12 +10,13 @@ module Util (
 
 import Prelude hiding (head)
 import Data.List hiding (head)
-import Data.Word
+import Data.Word (Word8)
+import Data.Char (chr, toLower)
 import Numeric (showHex)
-import qualified Data.Char as Char
+
 
 toLowerCase :: String -> String
-toLowerCase = map Char.toLower
+toLowerCase = map toLower
 
 bytesStr :: Int -> [Word8] -> String
 bytesStr sparse = insertAtN '\n' (sparse*3) . intercalate " " . map byteStr
@@ -39,7 +40,7 @@ head (x:_) = Just x
 
 unescape :: String -> Maybe String
 unescape ('\\':x:xs) = do
-  cc <- fmap Char.chr $ controlChar x
+  cc <- fmap chr $ controlChar x
   rest <- unescape xs
   return $ cc : rest
 unescape (x:xs) = unescape xs >>= (\rest -> return $ x : rest)
diff --git a/app/VirtualMachine.hs b/app/VirtualMachine.hs
index 3ea4736..ee9f8ba 100644
--- a/app/VirtualMachine.hs
+++ b/app/VirtualMachine.hs
@@ -10,14 +10,16 @@ module VirtualMachine (
   run
 ) where
 
-import Data.Word
-import Data.Foldable
-import qualified Data.Char as Char
-import qualified Data.Map as Map
+import Data.Word (Word8)
+import Data.Foldable (toList)
+import Data.Char (toLower, toUpper)
+
+import qualified Data.Map as M
 import qualified Data.Sequence as S
 import qualified Data.ByteString as B
 
-import qualified Util as U
+import Util (byteStr, bytesStr)
+
 
 data VM = VM { _pc :: Int
              , _fp :: Int
@@ -104,14 +106,14 @@ jumpIf predicate vm [addr] = Right $ vm { _pc = pc }
     (top:_) = toList . _stack $ vm
     pc = if top `predicate` 0 then addr else _pc vm + 1
 
-instructionByOp :: Map.Map Op Instruction
-instructionByOp = Map.fromList $ map (\i -> (_op i, i)) instructions
+instructionByOp :: M.Map Op Instruction
+instructionByOp = M.fromList $ map (\i -> (_op i, i)) instructions
 
 toOp :: String -> Op
 toOp = read . capitalize
   where capitalize :: String -> String
         capitalize [] = []
-        capitalize (x:xs) = Char.toUpper x : map Char.toLower xs
+        capitalize (x:xs) = toUpper x : map toLower xs
 
 parse :: B.ByteString -> Either String [Command]
 parse = parseCommands . B.unpack
@@ -120,13 +122,13 @@ parseCommands :: [Word8] -> Either String [Command]
 parseCommands [] = Right []
 parseCommands code@(x:_) = case parseCommand code of
   Just (cmd, rest) -> parseCommands rest >>= (\r -> return $ cmd : r)
-  Nothing -> Left $ "Unparseable byte: " ++ U.byteStr x ++ "\nIn following sequence:\n" ++ U.bytesStr 16 code
+  Nothing -> Left $ "Unparseable byte: " ++ byteStr x ++ "\nIn following sequence:\n" ++ bytesStr 16 code
 
 parseCommand :: [Word8] -> Maybe (Command, [Word8])
 parseCommand [] = Nothing
 parseCommand (opByte:xs) = do
   let op = toEnum . fromIntegral $ opByte :: Op
-  instruction <- Map.lookup op instructionByOp
+  instruction <- M.lookup op instructionByOp
   let paramsNumber = _noParams instruction
   let params = map fromIntegral $ take paramsNumber xs :: [Int]
   return (Command instruction params, drop paramsNumber xs)
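
Note for readers (not part of the patch itself): the expression getFirst . mconcat . map First $ sequenceA fs x, which parseAny and anyTokenizer keep using above, runs every function in fs on the same input and returns the first Just result. Below is a minimal, self-contained sketch of that idiom; the Matcher type and the digits, letters and firstMatch names are invented purely for illustration and do not exist in this repository.

-- Standalone illustration of the First-monoid selection idiom used by
-- parseAny and anyTokenizer (hypothetical names; run with runghc).
import Data.Char (isAlpha, isDigit)
import Data.Monoid (First(..))

-- A matcher either recognises its input or gives up with Nothing, mirroring
-- the shape of the Tokenizer and Parser functions in the patch above.
type Matcher = String -> Maybe String

digits, letters :: Matcher
digits s  = if not (null s) && all isDigit s then Just ("digits: " ++ s) else Nothing
letters s = if not (null s) && all isAlpha s then Just ("letters: " ++ s) else Nothing

-- sequenceA applies every matcher to the same input; wrapping each result in
-- First and mconcat-ing them keeps the leftmost Just, so the first matcher
-- that succeeds wins and Nothing comes back only when all of them fail.
firstMatch :: [Matcher] -> Matcher
firstMatch ms input = getFirst . mconcat . map First $ sequenceA ms input

main :: IO ()
main = do
  print (firstMatch [digits, letters] "123")  -- Just "digits: 123"
  print (firstMatch [digits, letters] "abc")  -- Just "letters: abc"
  print (firstMatch [digits, letters] "a1b")  -- Nothing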