diff --git a/bench/cardano-timeseries-io/CHANGELOG.md b/bench/cardano-timeseries-io/CHANGELOG.md new file mode 100644 index 00000000000..872cc41f136 --- /dev/null +++ b/bench/cardano-timeseries-io/CHANGELOG.md @@ -0,0 +1,5 @@ +# Revision history for cardano-timeseries-io + +## 1.0.0 -- March 2026 + +* First version. diff --git a/bench/cardano-timeseries-io/LICENSE b/bench/cardano-timeseries-io/LICENSE new file mode 100644 index 00000000000..f433b1a53f5 --- /dev/null +++ b/bench/cardano-timeseries-io/LICENSE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/bench/cardano-timeseries-io/NOTICE b/bench/cardano-timeseries-io/NOTICE new file mode 100644 index 00000000000..571e2b91fed --- /dev/null +++ b/bench/cardano-timeseries-io/NOTICE @@ -0,0 +1,13 @@ +Copyright 2023 Input Output Global Inc (IOG), 2023-2026 Intersect. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/bench/cardano-timeseries-io/README.md b/bench/cardano-timeseries-io/README.md new file mode 100644 index 00000000000..eba2c9faa8e --- /dev/null +++ b/bench/cardano-timeseries-io/README.md @@ -0,0 +1,28 @@ +# Cardano Timeseries I/O + +## What it does + +The primary goal of the project is to serve as a standalone library for `cardano-tracer`, providing realtime metric storage & query. + +The library exposes the following components: +- An interface of metric stores together with multiple implementations. +- A low-level unambiguous language & its interpreter against a metric store — for querying metrics. +- A high-level user-facing language & its elaborator to the low-level one. + +On top of the library the project provides a simple CLI for reading a store off disk & executing a query against it, optionally interactively. + +## CLI Syntax + +``` +Usage: cardano-timeseries-io FILE ((-x|--execute QUERY) | (-i|--interactive)) + + Run a query against a metric store + +Available options: + -x,--execute QUERY Execute the query + -i,--interactive Enter REPL + -h,--help Show this help text +``` + +## Build flags + - _profiling_ for enabling GHC profiling support. 
diff --git a/bench/cardano-timeseries-io/app/Cardano/Timeseries.hs b/bench/cardano-timeseries-io/app/Cardano/Timeseries.hs new file mode 100644 index 00000000000..01ce5c33366 --- /dev/null +++ b/bench/cardano-timeseries-io/app/Cardano/Timeseries.hs @@ -0,0 +1,39 @@ +{-# LANGUAGE OverloadedRecordDot #-} + +module Main where + +import Cardano.Timeseries.AsText (showT) +import Cardano.Timeseries.CLI +import Cardano.Timeseries.Common +import Cardano.Timeseries.Domain.Types (Timestamp) +import Cardano.Timeseries.Interface (execute) +import Cardano.Timeseries.Interp.Config (Config (..)) +import Cardano.Timeseries.Store + +import Data.Foldable +import qualified Data.Map as Map +import Data.Text (Text) +import qualified Data.Text.IO as Text +import Options.Applicative + +interpConfig :: Config +interpConfig = Config {defaultRangeSamplingRateMillis = 15 * 1000} + +now :: Timestamp +now = 0 + +main :: IO () +main = do + cmd <- execParser parseCommand + store <- readStore cmd.store + putStrLn "Metrics:" + for_ (Map.keys store) $ \k -> + Text.putStrLn (" — " <> k <> "[" <> showMaybe (earliest store k) <> "ms; " <> showMaybe (latest store k) <> "ms]") + case cmd.mode of + Interactive -> repl store interpConfig now + Execute query -> printExecutionResult $ execute store interpConfig now query + where + showMaybe :: Show a => Maybe a -> Text + showMaybe Nothing = "N/A" + showMaybe (Just x) = showT x + diff --git a/bench/cardano-timeseries-io/app/Cardano/Timeseries/CLI.hs b/bench/cardano-timeseries-io/app/Cardano/Timeseries/CLI.hs new file mode 100644 index 00000000000..490711d3da8 --- /dev/null +++ b/bench/cardano-timeseries-io/app/Cardano/Timeseries/CLI.hs @@ -0,0 +1,26 @@ +module Cardano.Timeseries.CLI(Mode(..), Command(..), parseCommand) where +import Data.Text (Text) +import Options.Applicative + +data Mode = Execute Text | Interactive + +data Command = Command { + store :: FilePath, + mode :: Mode +} + +parseExecute :: Parser Mode +parseExecute = Execute <$> option str 
(short 'x' <> long "execute" <> metavar "QUERY" <> help "Execute the query") + +parseInteractive :: Parser Mode +parseInteractive = flag' Interactive (short 'i' <> long "interactive" <> help "Enter REPL") + +parseMode :: Parser Mode +parseMode = parseExecute <|> parseInteractive + +parseStore :: Parser FilePath +parseStore = argument str (metavar "FILE") + +parseCommand :: ParserInfo Command +parseCommand = info (Command <$> parseStore <*> parseMode <**> helper) + (fullDesc <> progDesc "Run a query against a metric store") diff --git a/bench/cardano-timeseries-io/app/Cardano/Timeseries/Common.hs b/bench/cardano-timeseries-io/app/Cardano/Timeseries/Common.hs new file mode 100644 index 00000000000..47e7adcfdeb --- /dev/null +++ b/bench/cardano-timeseries-io/app/Cardano/Timeseries/Common.hs @@ -0,0 +1,91 @@ +{-# LANGUAGE FlexibleContexts #-} +{- HLINT ignore "Use print" -} + +module Cardano.Timeseries.Common(readStore, repl, printInterpResult, printExecutionResult) where + +import Cardano.Logging.Resources (ResourceStats, Resources (..)) +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Domain.Types (Timestamp) +import Cardano.Timeseries.Elab (elab, initialSt) +import Cardano.Timeseries.Import.PlainCBOR +import Cardano.Timeseries.Interp (interp) +import Cardano.Timeseries.Interp.Config (Config (..)) +import Cardano.Timeseries.Interp.Types (InterpError) +import Cardano.Timeseries.Interp.Value (Value) +import Cardano.Timeseries.Store +import Cardano.Timeseries.Store.Flat (Flat) +import Cardano.Timeseries.Store.Flat.Parser (double) +import qualified Cardano.Timeseries.Store.Flat.Parser as Flat.Parser +import Cardano.Timeseries.Store.Tree (Tree, fromFlat) +import Cardano.Timeseries.Surface.Expr.Parser (Parser) +import qualified Cardano.Timeseries.Surface.Expr.Parser as Surface.Parser + +import Control.DeepSeq (force) +import Control.Monad (forever) +import Control.Monad.Except (runExceptT) +import Control.Monad.State.Strict (evalState) +import Data.Foldable 
(traverse_) +import Data.Functor (void) +import qualified Data.Text as Text +import qualified Data.Text.IO as Text +import System.Exit (die) +import System.FilePath (takeExtension) +import System.IO (hFlush, stdout) +import Text.Megaparsec hiding (count) +import Text.Megaparsec.Char (newline, space, space1) +import Cardano.Timeseries.Interface (ExecutionError) + +_printStore :: Flat Double -> IO () +_printStore = traverse_ print + +_printStats :: ResourceStats -> IO () +_printStats stats = + putStrLn $ "Alloc: " <> show ((fromIntegral (rAlloc stats) :: Double) / 1024 / 1024) <> "MB\n" + <> "Live: " <> show ((fromIntegral (rLive stats) :: Double) / 1024 / 1024) <> "MB\n" + <> "Heap: " <> show ((fromIntegral (rHeap stats) :: Double) / 1024 / 1024) <> "MB\n" + <> "RSS: " <> show ((fromIntegral (rRSS stats) :: Double) / 1024 / 1024) <> "MB" + +printInterpResult :: Either InterpError Value -> IO () +printInterpResult (Left err) = Text.putStrLn $ asText err +printInterpResult (Right ok) = print ok + +printExecutionResult :: Either ExecutionError Value -> IO () +printExecutionResult (Left err) = Text.putStrLn $ asText err +printExecutionResult (Right ok) = print ok + +repl :: Store s Double => s -> Config -> Timestamp -> IO () +repl store interpCfg now = forever $ do + -- Just stats <- readResourceStats + -- putStrLn "----------" + -- printStats stats + putStrLn $ "Number of store entries: " <> show (count store) + putStrLn "----------" + putStr "> " + hFlush stdout + queryString <- Text.getLine + case parse (Surface.Parser.expr <* space <* eof) "input" queryString of + Left err -> putStrLn (errorBundlePretty err) + Right surfaceQuery -> do + -- putStrLn ("Surface expr: " <> show surfaceQuery) + case evalState (runExceptT (elab surfaceQuery)) initialSt of + Left err -> Text.putStrLn err + Right query -> do + Text.putStrLn (showT query) + printInterpResult (evalState (runExceptT $ interp interpCfg store mempty query now) 0) + +whitespace :: Parser () +whitespace = skipMany 
(try space1 <|> void newline) + +readStore :: FilePath -> IO (Tree Double) +readStore path | takeExtension path == ".cbor" = do + content <- readFileSnapshots path + putStrLn "Read the snapshots CBOR file!" + let store = {-# SCC "XXX" #-} force $ fromFlat $ snapshotsToFlatStore content + putStrLn "Created a store from CBOR!" + pure store +readStore path | takeExtension path == ".txt" = do + content <- Text.lines <$> Text.readFile path + case traverse (parse (Flat.Parser.point double <* whitespace <* eof) "input") content of + Left err -> die (errorBundlePretty err) + Right store -> pure $ fromFlat store +readStore path = die $ "Unknown extension: " <> takeExtension path diff --git a/bench/cardano-timeseries-io/bench/Bench.hs b/bench/cardano-timeseries-io/bench/Bench.hs new file mode 100644 index 00000000000..04fe304791e --- /dev/null +++ b/bench/cardano-timeseries-io/bench/Bench.hs @@ -0,0 +1,62 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE FlexibleContexts #-} +{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} + +import Cardano.Timeseries.AsText (showT) +import Cardano.Timeseries.Elab (elab, initialSt) +import Cardano.Timeseries.Import.PlainCBOR +import Cardano.Timeseries.Interp (interp) +import Cardano.Timeseries.Interp.Config +import Cardano.Timeseries.Interp.Value (Value) +import Cardano.Timeseries.Query.Expr (Expr) +import Cardano.Timeseries.Store +import Cardano.Timeseries.Store.Tree (fromFlat) +import qualified Cardano.Timeseries.Surface.Expr.Parser as Surface.Parser + +import Control.Monad.Except (runExceptT) +import Control.Monad.State.Strict (evalState) +import Data.Text (Text) +import qualified Data.Text.IO as Text +import Text.Megaparsec hiding (count) +import Text.Megaparsec.Char (space) + +import Criterion.Main + +-- Given a snapshots file +-- Given a query string +-- Benchmark evaluation of the query on flat & tree stores. 
+ +snapshotsFile :: FilePath +snapshotsFile = "data/6nodes_4hours_1mininterval.cbor" + +query :: Text +query = "\ + \let start = epoch + 1762173870000ms in \ + \let period = 10m in \ + \let F = Forge_forged_counter[start; start + period] in \ + \increase F" + +interpConfig :: Config +interpConfig = Config {defaultRangeSamplingRateMillis = 15 * 1000} + +action :: Store s Double => (s, Expr) -> Value +action (store, q) = + let Right !x = evalState (runExceptT $ interp interpConfig store mempty q 0) 0 in x + +main :: IO () +main = do + content <- readFileSnapshots snapshotsFile + let flatStore = snapshotsToFlatStore content + let treeStore = fromFlat flatStore + case parse (Surface.Parser.expr <* space <* eof) "input" query of + Left err -> putStrLn (errorBundlePretty err) + Right surfaceQuery -> do + case evalState (runExceptT (elab surfaceQuery)) initialSt of + Left err -> Text.putStrLn err + Right !q -> do + Text.putStrLn (showT q) + defaultMain + [ + bench "flat" $ nf action (flatStore, q), + bench "tree" $ nf action (treeStore, q) + ] diff --git a/bench/cardano-timeseries-io/cardano-timeseries-io.cabal b/bench/cardano-timeseries-io/cardano-timeseries-io.cabal new file mode 100644 index 00000000000..d6835bff764 --- /dev/null +++ b/bench/cardano-timeseries-io/cardano-timeseries-io.cabal @@ -0,0 +1,146 @@ +cabal-version: 3.4 +name: cardano-timeseries-io +description: Cardano Timeseries Input/Output for realtime metric storage & query. +version: 1.0.0 +category: Cardano + Metrics +copyright: 2026 Intersect. 
+license: Apache-2.0 +license-files: LICENSE + NOTICE +author: Ruslan Feizerakhmanov +maintainer: ruslan.feizerakhmanov@iohk.io +build-type: Simple +extra-doc-files: CHANGELOG.md + README.md + examples/*.txt + docs/*.txt + +flag profiling + description: Enable profiling + default: False + manual: True + +common common + ghc-options: + -Wall + + -- to ease development only + -- -Wno-unused-matches + -- -Wno-unused-top-binds + -- -Wno-unused-local-binds + -- -Wno-unused-imports + + if impl(ghc >= 9.8) + ghc-options: + -Wno-x-partial + + if flag(profiling) + ghc-options: + -fno-prof-auto + + default-language: Haskell2010 + + default-extensions: + LambdaCase + NamedFieldPuns + OverloadedStrings + + -- NB.: This package is strict Haskell! + Strict + +library + import: common + hs-source-dirs: src + + build-depends: + base, + megaparsec, + scientific, + vector, + text, + serialise, + statistics, + time, + containers, + mtl, + deepseq + + exposed-modules: + Cardano.Timeseries.AsText, + Cardano.Timeseries.Domain.Identifier, + Cardano.Timeseries.Domain.Instant, + Cardano.Timeseries.Domain.Interval, + Cardano.Timeseries.Domain.Timeseries, + Cardano.Timeseries.Domain.Types, + Cardano.Timeseries.Elab, + Cardano.Timeseries.Import.PlainCBOR, + Cardano.Timeseries.Interface, + Cardano.Timeseries.Interp, + Cardano.Timeseries.Interp.Config, + Cardano.Timeseries.Interp.Types, + Cardano.Timeseries.Interp.Value, + Cardano.Timeseries.Query.Expr, + Cardano.Timeseries.Store, + Cardano.Timeseries.Store.Flat, + Cardano.Timeseries.Store.Flat.Parser, + Cardano.Timeseries.Store.Tree, + Cardano.Timeseries.Surface.Expr, + Cardano.Timeseries.Surface.Expr.Parser, + + other-modules: + Cardano.Timeseries.Interp.Expect, + Cardano.Timeseries.Interp.Statistics, + Cardano.Timeseries.Query.BinaryArithmeticOp, + Cardano.Timeseries.Query.BinaryRelation, + Cardano.Timeseries.Resolve, + Cardano.Timeseries.Surface.Expr.Head, + Cardano.Timeseries.Typing, + Cardano.Timeseries.Unify, + Cardano.Timeseries.Util, + 
+executable cardano-timeseries-io + import: common + ghc-options: -rtsopts + hs-source-dirs: app + main-is: Cardano/Timeseries.hs + + other-modules: + Cardano.Timeseries.Common, + Cardano.Timeseries.CLI + + build-depends: + base, + megaparsec, + vector, + text, + serialise, + statistics, + time, + containers, + mtl, + trace-resources, + cardano-timeseries-io, + filepath, + deepseq, + optparse-applicative + +benchmark cardano-timeseries-io-bench + import: common + type: exitcode-stdio-1.0 + hs-source-dirs: bench + main-is: Bench.hs + + build-depends: + base, + vector, + text, + serialise, + megaparsec, + statistics, + time, + containers, + mtl, + trace-resources, + cardano-timeseries-io, + criterion diff --git a/bench/cardano-timeseries-io/data/6nodes_4hours_1mininterval.cbor b/bench/cardano-timeseries-io/data/6nodes_4hours_1mininterval.cbor new file mode 100644 index 00000000000..69d7314c597 Binary files /dev/null and b/bench/cardano-timeseries-io/data/6nodes_4hours_1mininterval.cbor differ diff --git a/bench/cardano-timeseries-io/docs/domain.txt b/bench/cardano-timeseries-io/docs/domain.txt new file mode 100644 index 00000000000..3df20da7f09 --- /dev/null +++ b/bench/cardano-timeseries-io/docs/domain.txt @@ -0,0 +1,19 @@ +An instant of type `a` is the following data: +— A set of labels +— A timestamp +— An element of `a` +The set of labels identifies which timeseries the instant belongs to. +We sometimes call this set of labels a "series identifier". 
+ + +An instant vector of type `a` is a set of `instant`s of type `a` where every two elements of the set have distinct: +— Set of labels + +A timeseries of type `a` is: +— A set of labels +— a set of pairs of: + — A timestamp + — An element of `a` + +A timeseries vector of type `a` is a set of timeseries where every two elements of the set have distinct: +— Set of labels + diff --git a/bench/cardano-timeseries-io/docs/elab.txt b/bench/cardano-timeseries-io/docs/elab.txt new file mode 100644 index 00000000000..79090a11506 --- /dev/null +++ b/bench/cardano-timeseries-io/docs/elab.txt @@ -0,0 +1,143 @@ +The set of elaboration rules written out here is incomplete. +The rules here can be seen as an introduction to the Haskell implementation. +The judgement forms presented here are complete and in 1-to-1 correspondence with the implementation. + +let x = t in t +\x -> t +t t +fst t +snd t +(t, t) +t == t (~) +t != t (~) +t < t (~) +t <= t (~) +t > t (~) +t >= t (~) +t + t (~) +t - t (~) +t * t (~) +t / t (~) +true +false +!t +t && t +t || t +ms +s +min +h +epoch +now +to_scalar t +t[t; t] +t[t; t : t] +filter_by_label {s = s, ..., s = s} t +max t +min t +avg t +filter t t +join t t +map t t +abs t +increase t +rate t +avg_over_time t +sum_over_time t +quantile_over_time t t +unless t t +quantile_by (s, ..., s) t t +x +metrics + + +// General elaboration judgement + ~> : + +// Binary relation elaboration judgement + ⊦ ( : ) ( : ) ~> : + +// Binary arithmetic operator elaboration judgement + ⊦ ( : ) ( : ) ~> : + +// "to_scalar" elaboration judgement + ⊦ to_scalar ( : ) ~> + + +Γ ⊦ t ~> ?x : A +Γ (x : A) ⊦ e ~> ?y : B +=========================================== +Γ ⊦ let x = t in e ~> let x = ?x in ?y : B + + +Γ (x : A) ⊦ t ~> ?z : B +================================= +Γ ⊦ \x -> t ~> \x -> ?z : A -> B + + +Γ = Γ₀ (x : C) Γ₁ +Γ ⊦ ?x := x : C +================= +Γ ⊦ x ~> ?x : C + + +x ∉ Γ +Γ ⊦ ?x ≔ x : Timestamp -> Scalar // Assumes existence of x in the metric store 
+================================== +Γ ⊦ x ~> ?x : Timestamp -> Scalar + + +Γ ⊦ t ~> ?z : (A, B) +======================== +Γ ⊦ fst t ~> fst ?z : A + + +Γ ⊦ t ~> ?z : (A, B) +======================== +Γ ⊦ snd t ~> snd ?z : B + + +Γ ⊦ a ~> ?x : A +Γ ⊦ b ~> ?y : B +================================ +Γ ⊦ (a, b) ~> (?x, ?y) : (A, B) + + +Γ ⊦ ?a : ?A +Γ ⊦ ?b : ?B +Γ ⊦ a ~> ?a : ?A +Γ ⊦ b ~> ?b : ?B +Γ ⊦ (?a : ?A) rel (?b : ?B) ~> ?r : C +===================================== +Γ ⊦ a rel b ~> ?r : C + + +Γ ⊦ ?r := ⟦rel⟧ a b : Bool +============================================ +Γ ⊦ ?r : Bool +Γ ⊦ (a : Scalar) rel (b : Scalar) ~> ?r : Bool + + +Γ ⊦ ?r ≔ lte_instant_vector a b : InstantVector Scalar +======================================================================== +Γ ⊦ a : InstantVector Scalar <= b : Scalar ~> ?r : InstantVector Scalar + + +Γ ⊦ ?a : ?A +Γ ⊦ ?b : ?B +Γ ⊦ a ~> ?a : ?A +Γ ⊦ b ~> ?b : ?B +Γ ⊦ (?a : ?A) op (?b : ?B) ~> ?r : C +==================================== +Γ ⊦ a op b ~> ?r : C + + +Γ ⊦ ?r ≔ ⟦rel⟧ a b : Scalar +============================================== +Γ ⊦ (a : Scalar) rel (b : Scalar) ~> ?r : Scalar + + +Γ ⊦ s ~> ?s : ?A +Γ ⊦ to_scalar (?s : ?A) ~> ?r +============================= +Γ ⊦ to_scalar s ~> ?r : Scalar diff --git a/bench/cardano-timeseries-io/docs/sre-expressions.txt b/bench/cardano-timeseries-io/docs/sre-expressions.txt new file mode 100644 index 00000000000..9312b99f127 --- /dev/null +++ b/bench/cardano-timeseries-io/docs/sre-expressions.txt @@ -0,0 +1,47 @@ +// PromQL +( + (abs(max(${blockMetric}{environment="${env}"}) - ${blockMetric}{environment="${env}"}) > bool ${toString lagBlocks}) + - + (abs(max(${slotMetric}{environment="${env}"}) - on() group_right() ${slotMetric}{environment="${env}"}) < bool ${toString lagSeconds}) +) == 1 + +//PromQL +${kesPeriodsRemaining} <= ${toString periodNotice} + + +//PromQL +increase(cardano_node_metrics_Forge_forged_int[24h]) == 0 + +//PromQL 
+rate(cardano_node_metrics_slotsMissedNum_int[5m]) * 1 > 0.5 + +//PromQL +avg_over_time(netdata_statsd_cardano_node_ping_latency_ms_gauge_value_average[5m]) > 500 + + +//PromQL +avg(quantile_over_time(0.95, cardano_node_metrics_blockadoption_forgeDelay_real[6h])) >= 4.5 + + +//PromQL +100 * avg(avg_over_time(cardano_node_metrics_blockfetchclient_blocksize[6h]) / 90112) > ${highBlockUtilization} + + +//PromQL +cardano_node_metrics_blockfetchclient_blockdelay_cdfFive < 0.90 + + +//PromQL +round(increase((time() - cardano_node_metrics_nodeStartTime_int < bool 300)[1h:1m])) > 1 + + +//PromQL +(sum_over_time((cardano_node_metrics_blockNum_int != bool 0)[360m:1m]) < bool 350) > 0 unless cardano_node_metrics_blockNum_int + + +//PromQL +100 * quantile by(environment) (0.2, (cardano_node_metrics_density_real * 20)) < ${chainDensityVeryLow} + + +//PromQL +100 * quantile by(environment) (0.2, (cardano_node_metrics_density_real{environment!~"preview"} * 20)) < ${chainDensityLow} diff --git a/bench/cardano-timeseries-io/docs/typing.txt b/bench/cardano-timeseries-io/docs/typing.txt new file mode 100644 index 00000000000..6cc15eaa9e3 --- /dev/null +++ b/bench/cardano-timeseries-io/docs/typing.txt @@ -0,0 +1,484 @@ + +x ::= + +n ::= + +s, s' ::= + +// Label +l ::= s + +// Set of labels +l̄ ::= (l, ..., l) + +// Label constraint +c ::= l = s | l != s + +// Set of label constraints +c̄ ::= {c, ..., c} + +// Types +A, B, C, T ::= InstantVector T | RangeVector T | Scalar | Bool | Timestamp | Duration | (T, T) | T -> T | Text + +// Contexts +Γ ::= ε | Γ (x ≔ t : T) | Γ (x : T) + +// Terms +t, f, e, a, b ::= x + | (t, t) + | \x -> t + | let x = t in t + | fst t + | snd t + | eq_scalar t t + | not_eq_scalar t t + | eq_bool t t + | not_eq_bool t t + | lt_scalar t t + | lte_scalar t t + | gt_scalar t t + | gte_scalar t t + | add_scalar t t + | sub_scalar t t + | mul_scalar t t + | div_scalar t t + | not t + | and t t + | or t t + | milliseconds n + | seconds n + | minutes n + | hours n + 
| epoch + | now + | rewind t t + | fast_forward t t + | timestamp_to_scalar t + | bool_to_scalar t + | round_scalar t + | abs t + | range t t t + | range t t t t + | filter_by_label t c̄ + | max t + | min t + | avg t + | filter t + | join t t + | map t + | increase t + | rate t + | avg_over_time t + | sum_over_time t + | quantile_over_time t t + | unless t t + | eq_instant_vector_scalar t t + | not_eq_instant_vector_scalar t t + | lt_instant_vector_scalar t t + | lte_instant_vector_scalar t t + | gt_instant_vector_scalar t t + | gte_instant_vector_scalar t t + | add_instant_vector_scalar t t + | sub_instant_vector_scalar t t + | mul_instant_vector_scalar t t + | div_instant_vector_scalar t t + | instant_vector_to_scalar t + | quantile_by t t + | metrics + +┌──────┐ +│A type│ +└──────┘ + +A type +---------------------- +InstantVector A type + + +A type +-------------------- +RangeVector A type + + +Scalar type + + +Bool type + + +A type +B type +----------- +(A, B) type + +A type +B type +------ +A -> B + + +Timestamp type + + +Duration type + +Text type + +┌──────┐ +│Γ ctx │ +└──────┘ + +ε ctx + +Γ ctx +A type +------------- +Γ (x : A) ctx + +Γ ⊦ t : A +----------------- +Γ (x ≔ t : A) ctx + +┌─────────┐ +│Γ ctx │ +│A type │ +│---------│ +│Γ ⊦ t : A│ +└─────────┘ + + +--------------------- ✔ +Γ₀ (x : C) Γ₁ ⊦ x : C + + +------------------------- ✔ +Γ₀ (x ≔ t : C) Γ₁ ⊦ x : C + + +x ∈ metric-store +----------------------------------------- ✔ +Γ ⊦ x : Timestamp -> InstantVector Scalar + + +Γ (x := t : A) ⊦ e : B +---------------------- ✔ +Γ ⊦ let x = t in e : B + + +(x : A) ⊦ t : B +-------------------- ✔ +Γ ⊦ \x -> t : A -> B + + +Γ ⊦ t : (A, B) +-------------- ✔ +Γ ⊦ fst t : A + + +Γ ⊦ t : (A, B) +-------------- ✔ +Γ ⊦ snd t : B + + +Γ ⊦ a : A +Γ ⊦ b : B +------------------- ✔ +Γ ⊦ (a, b) : (A, B) + + +Γ ⊦ a : Bool +Γ ⊦ b : Bool +---------------------- ✔ +Γ ⊦ eq_bool a b : Bool + + +Γ ⊦ a : Bool +Γ ⊦ b : Bool +-------------------------- ✔ +Γ ⊦ not_eq_bool a b : Bool 
+ + +Γ ⊦ a : Scalar +Γ ⊦ b : Scalar +------------------------ ✔ +Γ ⊦ eq_scalar a b : Bool + + +Γ ⊦ a : Scalar +Γ ⊦ b : Scalar +---------------------------- ✔ +Γ ⊦ not_eq_scalar a b : Bool + + +Γ ⊦ a : Scalar +Γ ⊦ b : Scalar +------------------------ ✔ +Γ ⊦ lt_scalar a b : Bool + + +Γ ⊦ a : Scalar +Γ ⊦ b : Scalar +------------------------- ✔ +Γ ⊦ lte_scalar a b : Bool + + +Γ ⊦ a : Scalar +Γ ⊦ b : Scalar +------------------------ ✔ +Γ ⊦ gt_scalar a b : Bool + + +Γ ⊦ a : Scalar +Γ ⊦ b : Scalar +------------------------- ✔ +Γ ⊦ gte_scalar a b : Bool + + +Γ ⊦ a : Scalar +Γ ⊦ b : Scalar +--------------------------- ✔ +Γ ⊦ add_scalar a b : Scalar + + +Γ ⊦ a : Scalar +Γ ⊦ b : Scalar +--------------------------- ✔ +Γ ⊦ sub_scalar a b : Scalar + + +Γ ⊦ a : Scalar +Γ ⊦ b : Scalar +--------------------------- ✔ +Γ ⊦ mul_scalar a b : Scalar + + +Γ ⊦ a : Scalar +Γ ⊦ b : Scalar +--------------------------- ✔ +Γ ⊦ div_scalar a b : Scalar + + +Γ ⊦ a : Bool +---------------- ✔ +Γ ⊦ not a : Bool + + +Γ ⊦ a : Bool +Γ ⊦ b : Bool +------------------ ✔ +Γ ⊦ and a b : Bool + + +Γ ⊦ a : Bool +Γ ⊦ b : Bool +----------------- ✔ +Γ ⊦ or a b : Bool + + +n integer // integer literal +----------------------------- ✔ +Γ ⊦ milliseconds n : Duration + + +n integer // integer literal +---------------------------- ✔ +Γ ⊦ seconds n : Duration + + +n integer // integer literal +---------------------------- ✔ +Γ ⊦ minutes n : Duration + + +n integer // integer literal + // Syntax hugar: h ✗ +----------------------------------- ✔ +Γ ⊦ hours n : Duration + + +Γ ⊦ epoch : Timestamp ✔ + + +Γ ⊦ now : Timestamp ✔ + + +Γ ⊦ t : Timestamp +Γ ⊦ d : Duration +-------------------------- ✔ +Γ ⊦ rewind t d : Timestamp + + +Γ ⊦ t : Timestamp +Γ ⊦ d : Duration +-------------------------------- ✔ +Γ ⊦ fast_forward t d : Timestamp + + +Γ ⊦ t : Timestamp +---------------------------------- ✔ +Γ ⊦ timestamp_to_scalar t : Scalar + + +Γ ⊦ t : Bool +----------------------------- ✔ +Γ ⊦ bool_to_scalar t : Scalar + + +// Given 
a continuous timeseries vector and an interval computes a discrete timeseries vector (range vector)
Γ ⊦ s : Timestamp -> InstantVector a
Γ ⊦ a : Timestamp
Γ ⊦ b : Timestamp
------------------------------------ ✔
Γ ⊦ range s a b : RangeVector a


// More general version with a sampling rate
Γ ⊦ s : Timestamp -> InstantVector a
Γ ⊦ a : Timestamp
Γ ⊦ b : Timestamp
Γ ⊦ d : Duration
--------------------------------- ✔
Γ ⊦ range s a b d : RangeVector a


// Takes a subset of instant vector `v` by keeping only those instants whose labels
// satisfy the constraints c̄
Γ ⊦ v : InstantVector a
----------------------------------------- ✔
Γ ⊦ filter_by_label c̄ v : InstantVector a


Γ ⊦ v : InstantVector Scalar
---------------------------- ✔
Γ ⊦ max v : Scalar


Γ ⊦ v : InstantVector Scalar
---------------------------- ✔
Γ ⊦ min v : Scalar


Γ ⊦ v : InstantVector Scalar
--------------------------- ✔
Γ ⊦ avg v : Scalar


Γ ⊦ f : A -> Bool
Γ ⊦ v : InstantVector A
-------------------------------- ✔
Γ ⊦ filter f v : InstantVector A


Γ ⊦ u : InstantVector A
Γ ⊦ v : InstantVector B
----------------------------------- // 1-to-1 match is assumed ✔
Γ ⊦ join u v : InstantVector (A, B)


Γ ⊦ f : A -> B
Γ ⊦ v : InstantVector A
----------------------------- ✔
Γ ⊦ map f v : InstantVector B


Γ ⊦ t : Scalar
--------------------------- ✔
Γ ⊦ round_scalar t : Scalar


Γ ⊦ t : Scalar
------------------ ✔
Γ ⊦ abs t : Scalar


Γ ⊦ r : RangeVector Scalar
------------------------------------ ✔
Γ ⊦ increase r : InstantVector Scalar


Γ ⊦ r : RangeVector Scalar
--------------------------------- ✔
Γ ⊦ rate r : InstantVector Scalar


Γ ⊦ r : RangeVector Scalar
------------------------------------------ ✔
Γ ⊦ avg_over_time r : InstantVector Scalar


Γ ⊦ r : RangeVector Scalar
------------------------------------------ ✔
Γ ⊦ sum_over_time r : InstantVector Scalar


Γ ⊦ q : Scalar // must be in range of [0; 1]
Γ ⊦ r : 
RangeVector Scalar
------------------------------------------------- ✔
Γ ⊦ quantile_over_time q r : InstantVector Scalar


// Returns elements of `u` whose label instance doesn't occur in `v`
Γ ⊦ u : InstantVector a
Γ ⊦ v : InstantVector b
-------------------------------- ✔
Γ ⊦ unless u v : InstantVector a


// meta-level abbreviation
Γ ⊦ a : InstantVector Scalar
Γ ⊦ s : Scalar
------------------------------------------------------------ ✔
Γ ⊦ lte_instant_vector_scalar a s : InstantVector Scalar
 lte_instant_vector_scalar a s ≡ filter (\v -> v <= s) a // same rule holds for other binary relations


// meta-level abbreviation
Γ ⊦ v : InstantVector Scalar
Γ ⊦ s : Scalar
------------------------------------ ✗
Γ ⊦ v <= bool s : InstantVector Bool
v <= bool s ≡ (\x -> x <= s) v


// meta-level definition (define via map)
Γ ⊦ v : InstantVector Scalar
Γ ⊦ s : Scalar
-------------------------------------------------------- ✔
Γ ⊦ add_instant_vector_scalar v s : InstantVector Scalar


// meta-level definition (define via map)
Γ ⊦ v : InstantVector Scalar
Γ ⊦ s : Scalar
-------------------------------------------------------- ✔
Γ ⊦ sub_instant_vector_scalar v s : InstantVector Scalar


// meta-level definition (define via map)
Γ ⊦ v : InstantVector Scalar
Γ ⊦ s : Scalar
-------------------------------------------------------- ✔
Γ ⊦ mul_instant_vector_scalar v s : InstantVector Scalar


// meta-level definition (define via map)
Γ ⊦ v : InstantVector Scalar
Γ ⊦ s : Scalar
-------------------------------------------------------- ✔
Γ ⊦ div_instant_vector_scalar v s : InstantVector Scalar


// meta-level definition (define via map)
Γ ⊦ v : InstantVector Bool
----------------------------------------------------- ✔
Γ ⊦ instant_vector_to_scalar v : InstantVector Scalar


Γ ⊦ q : Scalar
Γ ⊦ v : InstantVector Scalar
-------------------------------------------- ✔
Γ ⊦ quantile_by q v l̄ : InstantVector Scalar

Γ ctx
------------------ 
✔ +Γ ⊦ metrics : Text diff --git a/bench/cardano-timeseries-io/examples/query1.txt b/bench/cardano-timeseries-io/examples/query1.txt new file mode 100644 index 00000000000..6d9ab563986 --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query1.txt @@ -0,0 +1,7 @@ +let lagBlocks = 42 in +let lagSeconds = 42 in +let maxBlock = max ((blockMetric now) {"environment" = "${env}"}) in +let maxSlot = max ((slotMetric now) {"environment" = "${env}"}) in +let block = map (\x -> abs (maxBlock - x) > lagBlocks) ((blockMetric now) {"environment" = "${env}"}) in +let slot = map (\x -> abs (maxSlot - x) < lagSeconds) ((slotMetric now) {"environment" = "${env}"}) in +filter (\x -> fst x && !(snd x)) (join block slot) diff --git a/bench/cardano-timeseries-io/examples/query10.txt b/bench/cardano-timeseries-io/examples/query10.txt new file mode 100644 index 00000000000..b47af4df90d --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query10.txt @@ -0,0 +1,9 @@ +unless + ( + filter (\x -> x) + (map + (\x -> x < 350) + (sum_over_time (\t -> map (\x -> to_scalar (x != 0)) (cardano_node_metrics_blockNum_int t))[now-360m;now:1m]) + ) + ) + (cardano_node_metrics_blockNum_int now) diff --git a/bench/cardano-timeseries-io/examples/query11.txt b/bench/cardano-timeseries-io/examples/query11.txt new file mode 100644 index 00000000000..c4f24b00777 --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query11.txt @@ -0,0 +1,4 @@ +let chainDensityVeryLow = 42 in +100 * quantile_by ("environment") 0.2 (20 * cardano_node_metrics_density_real now) +< +chainDensityVeryLow diff --git a/bench/cardano-timeseries-io/examples/query12.txt b/bench/cardano-timeseries-io/examples/query12.txt new file mode 100644 index 00000000000..99395ddc614 --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query12.txt @@ -0,0 +1,6 @@ +let chainDensityLow = 42 in +100 * quantile_by ("environment") + 0.2 + (20 * (cardano_node_metrics_density_real now) {"environment" != "preview"}) +< +chainDensityLow diff 
--git a/bench/cardano-timeseries-io/examples/query2.txt b/bench/cardano-timeseries-io/examples/query2.txt new file mode 100644 index 00000000000..d51be02db18 --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query2.txt @@ -0,0 +1,2 @@ +let periodNotice = 42 in +kesPeriodsRemaining now <= periodNotice diff --git a/bench/cardano-timeseries-io/examples/query3.txt b/bench/cardano-timeseries-io/examples/query3.txt new file mode 100644 index 00000000000..97af2df70f9 --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query3.txt @@ -0,0 +1 @@ +increase cardano_node_metrics_Forge_forged_int[now - 24h; now] == 0 diff --git a/bench/cardano-timeseries-io/examples/query4.txt b/bench/cardano-timeseries-io/examples/query4.txt new file mode 100644 index 00000000000..d4e227c499d --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query4.txt @@ -0,0 +1 @@ +rate cardano_node_metrics_slotsMissedNum_int[now - 5m; now] > 0.5 diff --git a/bench/cardano-timeseries-io/examples/query5.txt b/bench/cardano-timeseries-io/examples/query5.txt new file mode 100644 index 00000000000..88144b68140 --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query5.txt @@ -0,0 +1 @@ +avg_over_time netdata_statsd_cardano_node_ping_latency_ms_gauge_value_average[now - 5m; now] > 500 diff --git a/bench/cardano-timeseries-io/examples/query6.txt b/bench/cardano-timeseries-io/examples/query6.txt new file mode 100644 index 00000000000..655b15f35ed --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query6.txt @@ -0,0 +1 @@ +avg (quantile_over_time 0.95 cardano_node_metrics_blockadoption_forgeDelay_real[now - 6h; now]) >= 4.5 diff --git a/bench/cardano-timeseries-io/examples/query7.txt b/bench/cardano-timeseries-io/examples/query7.txt new file mode 100644 index 00000000000..cc25e772207 --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query7.txt @@ -0,0 +1,2 @@ +let highBlockUtilization = 42 in +100.0 * avg (avg_over_time cardano_node_metrics_blockfetchclient_blocksize[now - 6h; 
now] / 90112) > highBlockUtilization diff --git a/bench/cardano-timeseries-io/examples/query8.txt b/bench/cardano-timeseries-io/examples/query8.txt new file mode 100644 index 00000000000..be240120410 --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query8.txt @@ -0,0 +1 @@ +cardano_node_metrics_blockfetchclient_blockdelay_cdfFive now < 0.9 diff --git a/bench/cardano-timeseries-io/examples/query9.txt b/bench/cardano-timeseries-io/examples/query9.txt new file mode 100644 index 00000000000..5adc53374d6 --- /dev/null +++ b/bench/cardano-timeseries-io/examples/query9.txt @@ -0,0 +1,6 @@ +map (\x -> round x) + (increase + (\t -> map (\x -> to_scalar (x < 300)) (to_scalar t - cardano_node_metrics_nodeStartTime_int t))[now-1h;now:1m] + ) +> +1 diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/AsText.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/AsText.hs new file mode 100644 index 00000000000..ea2b04e4fbe --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/AsText.hs @@ -0,0 +1,12 @@ +module Cardano.Timeseries.AsText where + +import Data.Text (Text, pack) + +-- | For the purpose of pretty-printing. +-- Result may include linebreaks. +class AsText a where + asText :: a -> Text + +{-# INLINE showT #-} +showT :: Show a => a -> Text +showT = pack . show diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Identifier.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Identifier.hs new file mode 100644 index 00000000000..840e5c69ef5 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Identifier.hs @@ -0,0 +1,14 @@ +module Cardano.Timeseries.Domain.Identifier(Identifier(..)) where + +import Cardano.Timeseries.AsText + +import Data.Text (Text, cons) + +-- | Identifiers come in two sorts: Userspace and Machine-generated. +-- | The first kind comes from user-typed expressions. +-- | The second kind is used for automatic code-generation for hygienic scoping (i.e. 
to avoid unintentional variable capture) +data Identifier = User Text | Machine Int deriving (Show, Ord, Eq) + +instance AsText Identifier where + asText (User x) = x + asText (Machine i) = '$' `cons` showT i diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Instant.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Instant.hs new file mode 100644 index 00000000000..5c2f1d7fb90 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Instant.hs @@ -0,0 +1,43 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE FlexibleInstances #-} + +module Cardano.Timeseries.Domain.Instant(Instant(..), InstantVector, mostRecent, share, toVector) where + +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Domain.Types (SeriesIdentifier, Timestamp) + +import Control.DeepSeq (NFData) +import qualified Data.Set as Set +import Data.Text as Text (pack, unlines) +import Data.Vector +import GHC.Generics (Generic) + +-- | One datapoint in a series. +data Instant a = Instant { + labels :: SeriesIdentifier, + timestamp :: Timestamp, + value :: a +} deriving (Show, Eq, Functor, Foldable, Traversable, Generic) + +instance NFData a => NFData (Instant a) + +-- | Do the instant vectors share a series? +share :: Instant a -> Instant b -> Bool +share a b = labels a == labels b + +-- | Datapoints from different series. The vector must not contain datapoints sharing a series. +type InstantVector a = [Instant a] + +mostRecent :: Instant a -> Instant a -> Instant a +mostRecent u v = if timestamp u < timestamp v then v else u + +toVector :: InstantVector Double -> Vector Double +toVector = fromList . fmap value + +instance Show a => AsText (Instant a) where + asText (Instant ls t v) = + pack (show (Set.toList ls)) <> " " <> pack (show t) <> " " <> pack (show v) + +instance Show a => AsText (InstantVector a) where + asText = Text.unlines . 
fmap asText diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Interval.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Interval.hs new file mode 100644 index 00000000000..70916400565 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Interval.hs @@ -0,0 +1,12 @@ +module Cardano.Timeseries.Domain.Interval(Interval(..), duration) where + +import Cardano.Timeseries.Domain.Types (Timestamp) + +-- | A time interval. Assumption: `start` ≤ `end` +data Interval = Interval { + start :: Timestamp, + end :: Timestamp +} deriving (Show, Eq) + +duration :: Interval -> Double +duration (Interval s e) = fromIntegral (e - s) / 2 diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Timeseries.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Timeseries.hs new file mode 100644 index 00000000000..b7db5ad70b2 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Timeseries.hs @@ -0,0 +1,115 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE RecordWildCards #-} + +module Cardano.Timeseries.Domain.Timeseries(Timeseries(..), TimeseriesVector, + transpose, toVector, oldest, newest, eachOldest, eachNewest, superseries) where + +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Domain.Instant (Instant (Instant), InstantVector) +import qualified Cardano.Timeseries.Domain.Instant as Instant +import Cardano.Timeseries.Domain.Types + +import Control.DeepSeq (NFData) +import Data.Function (on) +import Data.List (find, maximumBy, minimumBy) +import Data.Set +import qualified Data.Set as Set +import qualified Data.Text as Text +import Data.Vector (Vector) +import qualified Data.Vector as Vector +import GHC.Generics (Generic) + +-- | A collection of datapoints sharing a series. 
+data Timeseries a = Timeseries { + labels :: SeriesIdentifier, + dat :: [(Timestamp, a)] +} deriving (Show, Functor, Foldable, Traversable, Generic) + +instance NFData a => NFData (Timeseries a) + +oldest :: Timeseries a -> Maybe (Instant a) +oldest Timeseries{..} | Prelude.null dat = Nothing +oldest Timeseries{..} = + let (t, x) = minimumBy (compare `on` fst) dat in + Just (Instant labels t x) + +newest :: Timeseries a -> Maybe (Instant a) +newest Timeseries{..} | Prelude.null dat = Nothing +newest Timeseries{..} = + let (t, x) = maximumBy (compare `on` fst) dat in + Just (Instant labels t x) + +-- | Every two elements in the list must have distinct series identifiers (set of labels), +-- | i.e. the series in the list must be distinct. +type TimeseriesVector a = [Timeseries a] + +eachOldest :: TimeseriesVector a -> Maybe [Instant a] +eachOldest = traverse oldest + +eachNewest :: TimeseriesVector a -> Maybe [Instant a] +eachNewest = traverse newest + +-- | Given a list of range vectors, forms up a timeseries vector. +-- | This operation is, in some sense, transposition: +-- | +-- | ⎴ ⎴ ⎴ ⎴ +-- | series1: ... ◯ ◯ ... +-- | series2: ... ◯ ◯ ... +-- | series3: ... ◯ ◯ ◯ ... +-- | ... ... +-- | ⎵ ⎵ ⎵ ⎵ +-- | -------------------------------> t +-- | t₀ t₁ t₂ t₃ +-- | +-- | =====> +-- | +-- | +-- | +-- | series1: [ ... ◯ ◯ ... ] +-- | series2: [ ... ◯ ◯ ... ] +-- | series3: [ ... ◯ ◯ ◯ ... ] +-- | ... ... +-- | +-- | ----------------------------------------> t +-- t₀ t₁ t₂ t₃ +transpose :: [InstantVector a] -> TimeseriesVector a +transpose instants = + Set.foldl' (\vector ls -> form ls instants : vector) [] (setOfLabels instants) where + + -- | Given a set of labels (identifying a series) form up a series from a list of instant vectors. 
+ form :: SeriesIdentifier -> [InstantVector a] -> Timeseries a + form ls insts_ = Timeseries ls (formInt insts_) where + -- | Extract the data pertaining to the series (identified by the given `SeriesIdentifier`) from the list of + -- | ranges vectors. + formInt :: [InstantVector a] -> [(Timestamp, a)] + formInt [] = [] + formInt (inst : insts) = + case find (\i -> Instant.labels i == ls) inst of + Just i -> (Instant.timestamp i, Instant.value i) : formInt insts + Nothing -> formInt insts + + setOfLabels' :: InstantVector a -> Set SeriesIdentifier + setOfLabels' [] = Set.empty + setOfLabels' (i : is) = Set.insert (Instant.labels i) (setOfLabels' is) + + setOfLabels :: [InstantVector a] -> Set SeriesIdentifier + setOfLabels [] = Set.empty + setOfLabels (v : vs) = setOfLabels' v `Set.union` setOfLabels vs + +toVector :: Timeseries Double -> Vector Double +toVector = Vector.fromList . fmap snd . dat + +-- Widen the series by the given set of labels. +superseries :: Set Label -> SeriesIdentifier -> SeriesIdentifier +superseries ls = Set.filter (\(k, _) -> k `elem` ls) + +instance Show a => AsText (Timeseries a) where + asText (Timeseries ls ps) = + showT (Set.toList ls) + <> "\n" + <> Text.unlines (fmap (\(t, v) -> showT t <> " " <> showT v) ps) + +instance Show a => AsText (TimeseriesVector a) where + asText = Text.unlines . fmap asText diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Types.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Types.hs new file mode 100644 index 00000000000..a5f97d03ba5 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Domain/Types.hs @@ -0,0 +1,20 @@ +module Cardano.Timeseries.Domain.Types(MetricIdentifier, Label, Labelled, Timestamp, SeriesIdentifier) where + +import Prelude hiding (length) + +import Data.Set (Set) +import Data.Text (Text) +import Data.Word (Word64) + +-- | Each series in the (metric) store can be identified by a metric name. 
+type MetricIdentifier = Text + +type Label = Text + +-- | Key-value pair of a label and its value. +type Labelled a = (Label, a) + +-- | Series is identified by a set of labels. Hence the name. +type SeriesIdentifier = Set (Labelled Text) + +type Timestamp = Word64 diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Elab.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Elab.hs new file mode 100644 index 00000000000..69391d1a170 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Elab.hs @@ -0,0 +1,900 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE ViewPatterns #-} + +module Cardano.Timeseries.Elab(initialSt, St(..), ElabM, elab) where +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Domain.Identifier (Identifier) +import Cardano.Timeseries.Query.BinaryArithmeticOp +import qualified Cardano.Timeseries.Query.BinaryArithmeticOp as BinaryArithmeticOp +import Cardano.Timeseries.Query.BinaryRelation (BinaryRelation) +import qualified Cardano.Timeseries.Query.BinaryRelation as BinaryRelation +import Cardano.Timeseries.Query.Expr (HoleIdentifier) +import qualified Cardano.Timeseries.Query.Expr as Semantic +import Cardano.Timeseries.Resolve +import Cardano.Timeseries.Surface.Expr (Loc, getLoc) +import qualified Cardano.Timeseries.Surface.Expr as Surface +import Cardano.Timeseries.Typing (Binding (..), Context, Def (..), Defs, + Ty (Bool, Duration, Fun, Hole, InstantVector, RangeVector, Scalar, Timestamp), + TyPrec (Loose), instantiateExpr, prettyTy) +import qualified Cardano.Timeseries.Typing as Ty +import qualified Cardano.Timeseries.Typing as Types +import Cardano.Timeseries.Unify (UnificationProblem (..), UnifyM) +import qualified Cardano.Timeseries.Unify as Unify + +import Control.Monad (forM_) +import Control.Monad.Except (ExceptT, liftEither, runExceptT, throwError) +import Control.Monad.State.Strict (State, get, modify, put, runState) 
+import Data.Foldable as Foldable (toList) +import Data.List (find) +import qualified Data.Map.Strict as Map +import Data.Sequence as Seq (Seq (..), fromList, singleton, (><), (|>)) +import Data.Text (Text, pack) +import qualified Data.Text as Text + + +-- | Γ ⊦ s ~> ?x : A +data GeneralElabProblem = GeneralElabProblem { + gamma :: Context, + surface :: Surface.Expr, + hole :: HoleIdentifier, + holeTy :: Ty +} deriving (Show) + +instance AsText GeneralElabProblem where + asText (GeneralElabProblem gam sur _ holeTy) = + asText gam + <> " ⊦ " + <> prettyTy Loose holeTy + <> "\n @ " + <> asText (getLoc sur) + +evalGeneralElabProblem :: Defs -> GeneralElabProblem -> GeneralElabProblem +evalGeneralElabProblem defs (GeneralElabProblem gam tm hole holeTy) = + GeneralElabProblem (resolveContext defs gam) tm hole (resolveTy defs holeTy) + +-- | Γ ⊦ ((t : T) R (t : T)) ~> ? : T +data BinaryRelationElabProblem = BinaryRelationElabProblem { + gamma :: Context, + loc :: Loc, + lhs :: Semantic.Expr, + lhsTy :: Ty, + rel :: BinaryRelation.BinaryRelation, + rhs :: Semantic.Expr, + rhsTy :: Ty, + hole :: HoleIdentifier, + holeTy :: Ty +} deriving (Show) + +prettyBinaryRelationElabProblem :: BinaryRelationElabProblem -> Text +prettyBinaryRelationElabProblem (BinaryRelationElabProblem gam loc _ lhsTy rel _ rhsTy _ holeTy) = + asText gam + <> " ⊦ " + <> prettyTy Loose lhsTy + <> " " + <> asText rel + <> " " + <> prettyTy Loose rhsTy + <> " : " + <> prettyTy Loose holeTy + <> "\n @ " + <> asText loc + +evalBinaryRelationElabProblem :: Defs -> BinaryRelationElabProblem -> BinaryRelationElabProblem +evalBinaryRelationElabProblem defs (BinaryRelationElabProblem gam loc lhs lhsTy rel rhs rhsTy hole holeTy) = + BinaryRelationElabProblem + (resolveContext defs gam) + loc + lhs + (resolveTy defs lhsTy) + rel + rhs + (resolveTy defs rhsTy) + hole + (resolveTy defs holeTy) + +-- | Γ ⊦ (t R t) ~> ? 
: t +data BinaryArithmeticOpElabProblem = BinaryArithmeticOpElabProblem { + gamma :: Context, + loc :: Loc, + lhs :: Semantic.Expr, + lhsTy :: Ty, + op :: BinaryArithmeticOp.BinaryArithmeticOp, + rhs :: Semantic.Expr, + rhsTy :: Ty, + hole :: HoleIdentifier, + holeTy :: Ty +} deriving (Show) + +prettyBinaryArithmeticOpElabProblem :: BinaryArithmeticOpElabProblem -> Text +prettyBinaryArithmeticOpElabProblem (BinaryArithmeticOpElabProblem gam loc _ lhsTy op _ rhsTy _ holeTy) = + asText gam + <> " ⊦ " + <> prettyTy Loose lhsTy + <> " " + <> asText op + <> " " + <> prettyTy Loose rhsTy + <> " : " + <> prettyTy Loose holeTy + <> "\n @ " + <> asText loc + +evalBinaryArithmethicOpElabProblem :: Defs -> BinaryArithmeticOpElabProblem -> BinaryArithmeticOpElabProblem +evalBinaryArithmethicOpElabProblem defs (BinaryArithmeticOpElabProblem gam loc lhs lhsTy op rhs rhsTy hole holeTy) = + BinaryArithmeticOpElabProblem + (resolveContext defs gam) + loc + lhs + (resolveTy defs lhsTy) + op + rhs + (resolveTy defs rhsTy) + hole + (resolveTy defs holeTy) + +-- | Γ ⊦ to_scalar (t : T) ~> ? 
+data ToScalarElabProblem = ToScalarElabProblem { + gamma :: Context, + loc :: Loc, + expr :: Semantic.Expr, + ty :: Ty, + hole :: HoleIdentifier +} deriving (Show) + +instance AsText ToScalarElabProblem where + asText (ToScalarElabProblem gam loc _ ty _) = + asText gam + <> " ⊦ to_scalar " + <> prettyTy Loose ty + <> "\n @ " + <> asText loc + +evalToScalarElabProblem :: Defs -> ToScalarElabProblem -> ToScalarElabProblem +evalToScalarElabProblem defs (ToScalarElabProblem gam loc expr exprTy hole) = + ToScalarElabProblem + (resolveContext defs gam) + loc + expr + (resolveTy defs exprTy) + hole + +data ElabProblem = General GeneralElabProblem + | BinaryRelation BinaryRelationElabProblem + | BinaryArithmeticOp BinaryArithmeticOpElabProblem + | ToScalar ToScalarElabProblem deriving (Show) + +instance AsText ElabProblem where + asText (General p) = asText p + asText (BinaryRelation p) = prettyBinaryRelationElabProblem p + asText (BinaryArithmeticOp p) = prettyBinaryArithmeticOpElabProblem p + asText (ToScalar p) = asText p + +evalElabProblem :: Defs -> ElabProblem -> ElabProblem +evalElabProblem defs (General p) = General (evalGeneralElabProblem defs p) +evalElabProblem defs (BinaryRelation p) = BinaryRelation (evalBinaryRelationElabProblem defs p) +evalElabProblem defs (BinaryArithmeticOp p) = BinaryArithmeticOp (evalBinaryArithmethicOpElabProblem defs p) +evalElabProblem defs (ToScalar p) = ToScalar (evalToScalarElabProblem defs p) + + +data St = St { + defs :: Defs, + nextHoleIdentifier :: HoleIdentifier +} + +initialSt :: St +initialSt = St mempty 0 + +updateDefs :: (Defs -> Defs) -> St -> St +updateDefs f (St ds x) = St (f ds) x + +getDefs :: St -> Defs +getDefs = defs + +setDefs :: Defs -> St -> St +setDefs v = updateDefs (const v) + +updateNextHoleIdentifier :: (HoleIdentifier -> HoleIdentifier) -> St -> St +updateNextHoleIdentifier f (St ds x) = St ds (f x) + +runUnifyM :: UnifyM a -> ElabM a +runUnifyM f = do + st <- get + let ds = getDefs st + let !(!r, 
Unify.St ds') = runState (runExceptT f) (Unify.St ds) + put (setDefs ds' st) + liftEither r + + +type ElabError = Text + +type ElabM a = ExceptT ElabError (State St) a + +freshHoleIdentifier :: ElabM HoleIdentifier +freshHoleIdentifier = do + x <- (.nextHoleIdentifier) <$> get + modify (updateNextHoleIdentifier (+ 1)) + pure x + +freshTyHole :: ElabM HoleIdentifier +freshTyHole = do + x <- freshHoleIdentifier + modify (updateDefs (Map.insert x TyHoleDecl)) + pure x + +freshExprHole :: Ty -> ElabM HoleIdentifier +freshExprHole typ = do + x <- freshHoleIdentifier + modify (updateDefs (Map.insert x (ExprHoleDecl typ))) + pure x + +mbBinaryRelation :: Surface.Expr -> Maybe (Loc, Surface.Expr, BinaryRelation.BinaryRelation, Surface.Expr) +mbBinaryRelation (Surface.Lte l a b) = Just (l, a, BinaryRelation.Lte, b) +mbBinaryRelation (Surface.Lt l a b) = Just (l, a, BinaryRelation.Lt, b) +mbBinaryRelation (Surface.Gte l a b) = Just (l, a, BinaryRelation.Gte, b) +mbBinaryRelation (Surface.Gt l a b) = Just (l, a, BinaryRelation.Gt, b) +mbBinaryRelation (Surface.Eq l a b) = Just (l, a, BinaryRelation.Eq, b) +mbBinaryRelation (Surface.NotEq l a b) = Just (l, a, BinaryRelation.NotEq, b) +mbBinaryRelation _ = Nothing + +mbBinaryArithmeticOp :: Surface.Expr -> Maybe (Loc, Surface.Expr, BinaryArithmeticOp.BinaryArithmeticOp, Surface.Expr) +mbBinaryArithmeticOp (Surface.Add l a b) = Just (l, a, BinaryArithmeticOp.Add, b) +mbBinaryArithmeticOp (Surface.Sub l a b) = Just (l, a, BinaryArithmeticOp.Sub, b) +mbBinaryArithmeticOp (Surface.Mul l a b) = Just (l, a, BinaryArithmeticOp.Mul, b) +mbBinaryArithmeticOp (Surface.Div l a b) = Just (l, a, BinaryArithmeticOp.Div, b) +mbBinaryArithmeticOp _ = Nothing + +checkFresh :: Context -> Identifier -> ElabM () +checkFresh ctx v = + forM_ (find (\b -> Types.identifier b == v) ctx) $ \found -> + throwError $ pack $ "Reused variable name: " <> show (Types.identifier found) + +-- | Γ ⊦ to_scalar (t : T) ~> ? +-- Assumes that `Ty` is normal w.r.t. 
hole substitution. +solveToScalarElabProblem :: Context + -> Loc + -> Semantic.Expr + -> Ty + -> HoleIdentifier + -> ElabM (Maybe ([UnificationProblem], [ElabProblem])) +solveToScalarElabProblem _ _ expr Scalar hole = do + modify $ updateDefs $ instantiateExpr hole expr + pure $ Just ([], []) +solveToScalarElabProblem _ _ expr Bool hole = do + modify $ updateDefs $ instantiateExpr hole (Semantic.BoolToScalar expr) + pure $ Just ([], []) +solveToScalarElabProblem _ _ expr Duration hole = do + modify $ updateDefs $ instantiateExpr hole (Semantic.DurationToScalar expr) + pure $ Just ([], []) +solveToScalarElabProblem _ _ expr Timestamp hole = do + modify $ updateDefs $ instantiateExpr hole (Semantic.TimestampToScalar expr) + pure $ Just ([], []) +solveToScalarElabProblem _ _ _ (Hole _) _ = pure Nothing +solveToScalarElabProblem _ loc _ badType _ = throwError $ + "to_scalar can't be applied to an expression of type " + <> prettyTy Loose badType + <> "\n @ " + <> asText loc + +-- | Σ Γ ⊦ InstantVector Scalar `rel` Scalar ~> ? : InstantVector Scalar +-- | Σ Γ ⊦ Scalar `rel` Scalar ~> ? : Bool +-- | Σ Γ ⊦ Bool == Bool ~> ? : Bool +-- | Σ Γ ⊦ Bool != Bool ~> ? 
: Bool +solveCanonicalBinaryRelationElabProblem :: Context + -> Loc + -> Semantic.Expr + -> Ty + -> BinaryRelation + -> Semantic.Expr + -> Ty + -> HoleIdentifier + -> Ty + -> ElabM (Maybe ([UnificationProblem], [ElabProblem])) +solveCanonicalBinaryRelationElabProblem _ _ lhs (InstantVector Scalar) rel rhs Scalar hole (InstantVector Scalar) = do + modify $ updateDefs $ + instantiateExpr hole + (BinaryRelation.embedInstantVectorScalar rel lhs rhs) + pure $ Just ([], []) +solveCanonicalBinaryRelationElabProblem _ _ lhs Scalar rel rhs Scalar hole Bool = do + modify $ updateDefs $ + instantiateExpr hole + (BinaryRelation.embedScalar rel lhs rhs) + pure $ Just ([], []) +solveCanonicalBinaryRelationElabProblem _ _ lhs Bool BinaryRelation.Eq rhs Bool hole Bool = do + modify $ updateDefs $ + instantiateExpr hole + (Semantic.EqBool lhs rhs) + pure $ Just ([], []) +solveCanonicalBinaryRelationElabProblem _ _ lhs Bool BinaryRelation.NotEq rhs Bool hole Bool = do + modify $ updateDefs $ + instantiateExpr hole + (Semantic.NotEqBool lhs rhs) + pure $ Just ([], []) +solveCanonicalBinaryRelationElabProblem _ _ _ _ _ _ _ _ _ = pure Nothing + +solveNoncanonicalBinaryRelationElabProblem :: Context + -> Loc + -> Semantic.Expr + -> Ty + -> BinaryRelation + -> Semantic.Expr + -> Ty + -> HoleIdentifier + -> Ty + -> ElabM (Maybe ([UnificationProblem], [ElabProblem])) +solveNoncanonicalBinaryRelationElabProblem gam loc lhs (InstantVector Scalar) rel rhs Scalar hole typ = do + pure $ Just ([UnificationProblem loc typ (InstantVector Scalar)], + [BinaryRelation $ + BinaryRelationElabProblem gam loc lhs (InstantVector Scalar) + rel rhs Scalar hole (InstantVector Scalar)]) +solveNoncanonicalBinaryRelationElabProblem gam loc lhs Scalar rel rhs (InstantVector Scalar) hole typ = do + pure $ Just ([UnificationProblem loc typ (InstantVector Scalar)], + [BinaryRelation $ + BinaryRelationElabProblem gam loc rhs (InstantVector Scalar) + (BinaryRelation.swapInstantVectorScalar rel) lhs Scalar hole 
(InstantVector Scalar)]) +solveNoncanonicalBinaryRelationElabProblem gam loc lhs Scalar rel rhs Scalar hole typ = do + pure $ Just ([UnificationProblem loc typ Bool], + [BinaryRelation $ BinaryRelationElabProblem gam loc lhs Scalar rel rhs Scalar hole Bool]) +solveNoncanonicalBinaryRelationElabProblem gam loc lhs lhsTy rel rhs rhsTy hole Bool = do + pure $ Just ([UnificationProblem loc lhsTy Scalar, UnificationProblem loc rhsTy Scalar], + [BinaryRelation $ BinaryRelationElabProblem gam loc lhs Scalar rel rhs Scalar hole Bool]) +solveNoncanonicalBinaryRelationElabProblem gam loc lhs lhsTy rel rhs rhsTy hole typ | + (lhsTy == Bool || rhsTy == Bool) && (rel == BinaryRelation.Eq || rel == BinaryRelation.NotEq) = do + pure $ Just ([UnificationProblem loc lhsTy Bool, UnificationProblem loc rhsTy Bool, UnificationProblem loc typ Bool], + [BinaryRelation $ BinaryRelationElabProblem gam loc lhs Bool rel rhs Bool hole Bool]) +solveNoncanonicalBinaryRelationElabProblem _ _ _ _ _ _ _ _ _ = pure Nothing + +-- | Σ Γ ⊦ (a : A) `rel` (b : B) ~> ?x : C +-- Assumes that all given `Ty` are normal w.r.t. hole substitution. +-- TODO: Check completeness +solveBinaryRelationElabProblem :: Context + -> Loc + -> Semantic.Expr + -> Ty + -> BinaryRelation + -> Semantic.Expr + -> Ty + -> HoleIdentifier + -> Ty + -> ElabM (Maybe ([UnificationProblem], [ElabProblem])) +solveBinaryRelationElabProblem gam loc lhs lhsTy rel rhs rhsTy hole holeTy = + solveCanonicalBinaryRelationElabProblem gam loc lhs lhsTy rel rhs rhsTy hole holeTy >>= \case + Nothing -> solveNoncanonicalBinaryRelationElabProblem gam loc lhs lhsTy rel rhs rhsTy hole holeTy + Just ok -> pure (Just ok) + +-- | Σ Γ ⊦ Timestamp + Duration ~> ? : Timestamp +-- | Σ Γ ⊦ Timestamp - Duration ~> ? : Timestamp +-- | Σ Γ ⊦ Duration + Duration ~> ? : Duration +-- | Σ Γ ⊦ Scalar `op` Scalar ~> ? : Scalar +-- | Σ Γ ⊦ InstantVector Scalar `op` Scalar ~> ? 
: InstantVector Scalar +solveCanonicalBinaryArithmeticOpElabProblem :: Context + -> Loc + -> Semantic.Expr + -> Ty + -> BinaryArithmeticOp + -> Semantic.Expr + -> Ty + -> HoleIdentifier + -> Ty + -> ElabM (Maybe ([UnificationProblem], [ElabProblem])) +solveCanonicalBinaryArithmeticOpElabProblem _ _ lhs Timestamp BinaryArithmeticOp.Add rhs Duration hole Timestamp = do + modify $ updateDefs $ + instantiateExpr hole + (Semantic.FastForward lhs rhs) + pure $ Just ([], []) +solveCanonicalBinaryArithmeticOpElabProblem _ _ lhs Duration BinaryArithmeticOp.Add rhs Duration hole Duration = do + modify $ updateDefs $ + instantiateExpr hole + (Semantic.AddDuration lhs rhs) + pure $ Just ([], []) +solveCanonicalBinaryArithmeticOpElabProblem _ _ lhs Timestamp BinaryArithmeticOp.Sub rhs Duration hole Timestamp = do + modify $ updateDefs $ + instantiateExpr hole + (Semantic.Rewind lhs rhs) + pure $ Just ([], []) +solveCanonicalBinaryArithmeticOpElabProblem _ _ lhs Scalar op rhs Scalar hole Scalar = do + modify $ updateDefs $ + instantiateExpr hole + (BinaryArithmeticOp.embedScalar op lhs rhs) + pure $ Just ([], []) +solveCanonicalBinaryArithmeticOpElabProblem _ _ lhs (InstantVector Scalar) + op rhs Scalar hole (InstantVector Scalar) = do + modify $ updateDefs $ + instantiateExpr hole + (BinaryArithmeticOp.embedInstantVectorScalar op lhs rhs) + pure $ Just ([], []) +solveCanonicalBinaryArithmeticOpElabProblem _ _ _ _ _ _ _ _ _ = pure Nothing + +solveNoncanonicalBinaryArithmeticOpElabProblem :: + Context + -> Loc + -> Semantic.Expr + -> Ty + -> BinaryArithmeticOp + -> Semantic.Expr + -> Ty + -> HoleIdentifier + -> Ty + -> ElabM (Maybe ([UnificationProblem], [ElabProblem])) +solveNoncanonicalBinaryArithmeticOpElabProblem gam loc lhs Duration BinaryArithmeticOp.Add rhs Timestamp hole typ = do + pure $ Just ([UnificationProblem loc typ Timestamp], [BinaryArithmeticOp $ + BinaryArithmeticOpElabProblem gam loc rhs Timestamp BinaryArithmeticOp.Add lhs Duration hole Timestamp]) 
+solveNoncanonicalBinaryArithmeticOpElabProblem gam loc lhs Timestamp BinaryArithmeticOp.Add rhs rhsTy hole typ = do
+ pure $ Just ([UnificationProblem loc rhsTy Duration, UnificationProblem loc typ Timestamp],
+ [BinaryArithmeticOp $
+ BinaryArithmeticOpElabProblem gam loc lhs Timestamp BinaryArithmeticOp.Add rhs Duration hole Timestamp])
+solveNoncanonicalBinaryArithmeticOpElabProblem gam loc lhs lhsTy BinaryArithmeticOp.Add rhs Timestamp hole typ = do
+ pure $ Just ([UnificationProblem loc lhsTy Duration, UnificationProblem loc typ Timestamp],
+ [BinaryArithmeticOp $
+ BinaryArithmeticOpElabProblem gam loc lhs Duration BinaryArithmeticOp.Add rhs Timestamp hole Timestamp])
+solveNoncanonicalBinaryArithmeticOpElabProblem gam loc lhs Timestamp BinaryArithmeticOp.Sub rhs rhsTy hole typ = do
+ pure $ Just ([UnificationProblem loc rhsTy Duration, UnificationProblem loc typ Timestamp],
+ [BinaryArithmeticOp $
+ BinaryArithmeticOpElabProblem gam loc lhs Timestamp BinaryArithmeticOp.Sub rhs Duration hole Timestamp])
+solveNoncanonicalBinaryArithmeticOpElabProblem gam loc lhs Scalar op rhs (InstantVector Scalar) hole typ = do
+ pure (Just ([UnificationProblem loc typ (InstantVector Scalar)], [BinaryArithmeticOp $
+ BinaryArithmeticOpElabProblem gam loc rhs (InstantVector Scalar) op lhs Scalar hole (InstantVector Scalar)]))
+solveNoncanonicalBinaryArithmeticOpElabProblem gam loc lhs lhsTy op rhs rhsTy hole Scalar = do -- NOTE(fix): rhsTy was wildcarded and lhsTy constrained twice; bind rhsTy so the right operand is also constrained to Scalar
+ pure $ Just ([UnificationProblem loc lhsTy Scalar, UnificationProblem loc rhsTy Scalar],
+ [BinaryArithmeticOp $ BinaryArithmeticOpElabProblem gam loc lhs Scalar op rhs Scalar hole Scalar])
+solveNoncanonicalBinaryArithmeticOpElabProblem gam loc lhs Scalar op rhs Scalar hole holeTy = do
+ pure $ Just ([UnificationProblem loc holeTy Scalar],
+ [BinaryArithmeticOp $ BinaryArithmeticOpElabProblem gam loc lhs Scalar op rhs Scalar hole Scalar])
+solveNoncanonicalBinaryArithmeticOpElabProblem gam loc lhs lhsTy op rhs rhsTy hole holeTy
+ | lhsTy == InstantVector 
Scalar || rhsTy == InstantVector Scalar = + pure $ Just + ( + [UnificationProblem loc holeTy (InstantVector Scalar)], + [BinaryArithmeticOp $ + BinaryArithmeticOpElabProblem + gam + loc + lhs + lhsTy + op + rhs + rhsTy + hole + (InstantVector Scalar) + ] + ) +solveNoncanonicalBinaryArithmeticOpElabProblem _ _ _ _ _ _ _ _ _ = pure Nothing + +-- | Σ Γ ⊦ (a : A) `op` (b : B) ~> ?x : C +-- Assumes that all given `Ty` are normal w.r.t. hole substitution. +-- TODO: Check completeness +solveBinaryArithmeticOpElabProblem :: + Context + -> Loc + -> Semantic.Expr + -> Ty + -> BinaryArithmeticOp + -> Semantic.Expr + -> Ty + -> HoleIdentifier + -> Ty + -> ElabM (Maybe ([UnificationProblem], [ElabProblem])) +solveBinaryArithmeticOpElabProblem gam loc lhs lhsTy op rhs rhsTy hole holeTy = + solveCanonicalBinaryArithmeticOpElabProblem gam loc lhs lhsTy op rhs rhsTy hole holeTy >>= \case + Nothing -> solveNoncanonicalBinaryArithmeticOpElabProblem gam loc lhs lhsTy op rhs rhsTy hole holeTy + Just ok -> pure (Just ok) + +-- | Σ Γ ⊦ s ~> ?x : A +-- Assumes that the given `Ty` is normal w.r.t. hole substitution. 
+solveGeneralElabProblem :: Context -> Surface.Expr -> HoleIdentifier -> Ty -> ElabM ([UnificationProblem], [ElabProblem]) +solveGeneralElabProblem gam (mbBinaryRelation -> Just (l, a, r, b)) x typ = do + expectedA <- freshTyHole + expectedB <- freshTyHole + ah <- freshExprHole (Hole expectedA) + bh <- freshExprHole (Hole expectedB) + let e1 = General $ GeneralElabProblem gam a ah (Hole expectedA) + let e2 = General $ GeneralElabProblem gam b bh (Hole expectedB) + let e3 = BinaryRelation $ + BinaryRelationElabProblem + gam + l + (Semantic.Hole ah) + (Hole expectedA) + r + (Semantic.Hole bh) + (Hole expectedB) + x + typ + pure ([], [e1, e2, e3]) +solveGeneralElabProblem _ (Surface.Metrics l) x typ = do + let u = UnificationProblem l typ Types.Text + modify (updateDefs $ instantiateExpr x Semantic.Metrics) + pure ([u], []) +solveGeneralElabProblem _ (Surface.Number l f) x typ = do + let u = UnificationProblem l typ Scalar + modify (updateDefs $ instantiateExpr x (Semantic.Number f)) + pure ([u], []) +solveGeneralElabProblem _ (Surface.Truth l) x typ = do + let u = UnificationProblem l typ Bool + modify (updateDefs $ instantiateExpr x Semantic.True) + pure ([u], []) +solveGeneralElabProblem _ (Surface.Falsity l) x typ = do + let u = UnificationProblem l typ Bool + modify (updateDefs $ instantiateExpr x Semantic.False) + pure ([u], []) +solveGeneralElabProblem _ (Surface.Epoch l) x typ = do + let u = UnificationProblem l typ Timestamp + modify (updateDefs $ instantiateExpr x Semantic.Epoch) + pure ([u], []) +solveGeneralElabProblem _ (Surface.Now l) x typ = do + let u = UnificationProblem l typ Timestamp + modify (updateDefs $ instantiateExpr x Semantic.Now) + pure ([u], []) +solveGeneralElabProblem _ (Surface.Milliseconds l n) x typ = do + let u = UnificationProblem l typ Duration + modify $ updateDefs $ + instantiateExpr x $ Semantic.Milliseconds n + pure ([u], []) +solveGeneralElabProblem _ (Surface.Seconds l n) x typ = do + let u = UnificationProblem l typ Duration 
+ modify $ updateDefs $ + instantiateExpr x $ Semantic.Seconds n + pure ([u], []) +solveGeneralElabProblem _ (Surface.Minutes l n) x typ = do + let u = UnificationProblem l typ Duration + modify $ updateDefs $ + instantiateExpr x $ Semantic.Minutes n + pure ([u], []) +solveGeneralElabProblem _ (Surface.Hours l n) x typ = do + let u = UnificationProblem l typ Duration + modify $ updateDefs $ + instantiateExpr x $ Semantic.Hours n + pure ([u], []) +solveGeneralElabProblem gam (Surface.Or l a b) x typ = do + let u = UnificationProblem l typ Bool + ah <- freshExprHole Bool + bh <- freshExprHole Bool + let e1 = General $ GeneralElabProblem gam a ah Bool + let e2 = General $ GeneralElabProblem gam b bh Bool + modify $ updateDefs $ + instantiateExpr x $ Semantic.Or (Semantic.Hole ah) (Semantic.Hole bh) + pure ([u], [e1, e2]) +solveGeneralElabProblem gam (Surface.And l a b) x typ = do + let u = UnificationProblem l typ Bool + ah <- freshExprHole Bool + bh <- freshExprHole Bool + let e1 = General $ GeneralElabProblem gam a ah Bool + let e2 = General $ GeneralElabProblem gam b bh Bool + modify $ updateDefs $ + instantiateExpr x $ Semantic.And (Semantic.Hole ah) (Semantic.Hole bh) + pure ([u], [e1, e2]) +solveGeneralElabProblem gam (Surface.Not l a) x typ = do + let u = UnificationProblem l typ Bool + x' <- freshExprHole Bool + let e1 = General $ GeneralElabProblem gam a x' Bool + modify $ updateDefs $ + instantiateExpr x $ Semantic.Not (Semantic.Hole x') + pure ([u], [e1]) +solveGeneralElabProblem gam (Surface.Abs l a) x typ = do + let u = UnificationProblem l typ Scalar + x' <- freshExprHole Scalar + let e1 = General $ GeneralElabProblem gam a x' Scalar + modify $ updateDefs $ + instantiateExpr x $ Semantic.Abs (Semantic.Hole x') + pure ([u], [e1]) +solveGeneralElabProblem gam (Surface.Round l a) x typ = do + let u = UnificationProblem l typ Scalar + x' <- freshExprHole Scalar + let e1 = General $ GeneralElabProblem gam a x' Scalar + modify $ updateDefs $ + instantiateExpr 
x $ Semantic.RoundScalar (Semantic.Hole x') + pure ([u], [e1]) +solveGeneralElabProblem gam (Surface.Increase l a) x typ = do + let u = UnificationProblem l typ (InstantVector Scalar) + x' <- freshExprHole (RangeVector Scalar) + let e1 = General $ GeneralElabProblem gam a x' (RangeVector Scalar) + modify $ updateDefs $ + instantiateExpr x $ Semantic.Increase (Semantic.Hole x') + pure ([u], [e1]) +solveGeneralElabProblem gam (Surface.Rate l a) x typ = do + let u = UnificationProblem l typ (InstantVector Scalar) + x' <- freshExprHole (RangeVector Scalar) + let e1 = General $ GeneralElabProblem gam a x' (RangeVector Scalar) + modify $ updateDefs $ + instantiateExpr x $ Semantic.Rate (Semantic.Hole x') + pure ([u], [e1]) +solveGeneralElabProblem gam (Surface.Max l a) x typ = do + let u = UnificationProblem l typ Scalar + x' <- freshExprHole (InstantVector Scalar) + let e1 = General $ GeneralElabProblem gam a x' (InstantVector Scalar) + modify $ updateDefs $ + instantiateExpr x $ Semantic.Max (Semantic.Hole x') + pure ([u], [e1]) +solveGeneralElabProblem gam (Surface.Min l v) x typ = do + let u = UnificationProblem l typ Scalar + vh <- freshExprHole (InstantVector Scalar) + let e1 = General $ GeneralElabProblem gam v vh (InstantVector Scalar) + modify $ updateDefs $ + instantiateExpr x $ Semantic.Min (Semantic.Hole vh) + pure ([u], [e1]) +solveGeneralElabProblem gam (Surface.Avg l v) x typ = do + let u = UnificationProblem l typ Scalar + vh <- freshExprHole (InstantVector Scalar) + let e1 = General $ GeneralElabProblem gam v vh (InstantVector Scalar) + modify $ updateDefs $ + instantiateExpr x $ Semantic.Avg (Semantic.Hole vh) + pure ([u], [e1]) +solveGeneralElabProblem gam (Surface.AvgOverTime l r) x typ = do + let u = UnificationProblem l typ (InstantVector Scalar) + rh <- freshExprHole (RangeVector Scalar) + let e1 = General $ GeneralElabProblem gam r rh (RangeVector Scalar) + modify $ updateDefs $ + instantiateExpr x $ Semantic.AvgOverTime (Semantic.Hole rh) + pure 
([u], [e1])
+solveGeneralElabProblem gam (Surface.SumOverTime l r) x typ = do
+ let u = UnificationProblem l typ (InstantVector Scalar)
+ rh <- freshExprHole (RangeVector Scalar)
+ let e1 = General $ GeneralElabProblem gam r rh (RangeVector Scalar)
+ modify $ updateDefs $
+ instantiateExpr x $ Semantic.SumOverTime (Semantic.Hole rh) -- NOTE(fix): was Semantic.AvgOverTime — copy-paste from the clause above made sum_over_time compute an average
+ pure ([u], [e1])
+solveGeneralElabProblem gam (Surface.QuantileOverTime l k r) x typ = do
+ let u = UnificationProblem l typ (InstantVector Scalar)
+ rh <- freshExprHole (RangeVector Scalar)
+ let e1 = General $ GeneralElabProblem gam r rh (RangeVector Scalar)
+ kh <- freshExprHole Scalar
+ let e2 = General $ GeneralElabProblem gam k kh Scalar
+ modify $ updateDefs $
+ instantiateExpr x $ Semantic.QuantileOverTime (Semantic.Hole kh) (Semantic.Hole rh)
+ pure ([u], [e1, e2])
+solveGeneralElabProblem gam (Surface.QuantileBy loc ls k v) x typ = do
+ let u = UnificationProblem loc typ (InstantVector Scalar)
+ vh <- freshExprHole (InstantVector Scalar)
+ let e1 = General $ GeneralElabProblem gam v vh (InstantVector Scalar)
+ kh <- freshExprHole Scalar
+ let e2 = General $ GeneralElabProblem gam k kh Scalar
+ modify $ updateDefs $
+ instantiateExpr x $ Semantic.QuantileBy
+ ls (Semantic.Hole kh) (Semantic.Hole vh)
+ pure ([u], [e1, e2])
+solveGeneralElabProblem gam (Surface.Range l expr t0 t1 Nothing) x typ = do
+ tyh <- freshTyHole
+ let u = UnificationProblem l typ (RangeVector (Hole tyh))
+ exprh <- freshExprHole (Fun Timestamp (InstantVector (Hole tyh)))
+ t0h <- freshExprHole Timestamp
+ t1h <- freshExprHole Timestamp
+ let e1 = General $ GeneralElabProblem gam expr exprh (Fun Timestamp (InstantVector (Hole tyh)))
+ let e2 = General $ GeneralElabProblem gam t0 t0h Timestamp
+ let e3 = General $ GeneralElabProblem gam t1 t1h Timestamp
+ modify $ updateDefs $
+ instantiateExpr x $
+ Semantic.Range
+ (Semantic.Hole exprh)
+ (Semantic.Hole t0h)
+ (Semantic.Hole t1h)
+ Nothing
+ pure ([u], [e1, e2, e3])
+solveGeneralElabProblem gam 
(Surface.Range l expr t0 t1 (Just step)) x typ = do + tyh <- freshTyHole + let u = UnificationProblem l typ (RangeVector (Hole tyh)) + exprh <- freshExprHole (Fun Timestamp (InstantVector (Hole tyh))) + t0h <- freshExprHole Timestamp + t1h <- freshExprHole Timestamp + steph <- freshExprHole Duration + let e1 = General $ GeneralElabProblem gam expr exprh (Fun Timestamp (InstantVector (Hole tyh))) + let e2 = General $ GeneralElabProblem gam t0 t0h Timestamp + let e3 = General $ GeneralElabProblem gam t1 t1h Timestamp + let e4 = General $ GeneralElabProblem gam step steph Duration + modify $ updateDefs $ + instantiateExpr x $ + Semantic.Range + (Semantic.Hole exprh) + (Semantic.Hole t0h) + (Semantic.Hole t1h) + (Just (Semantic.Hole steph)) + pure ([u], [e1, e2, e3, e4]) +solveGeneralElabProblem gam (Surface.Fst l t) x typ = do + tyah <- freshTyHole + tybh <- freshTyHole + let u = UnificationProblem l typ (Hole tyah) + th <- freshExprHole (Ty.Pair (Hole tyah) (Hole tybh)) + let e = General $ GeneralElabProblem gam t th (Ty.Pair (Hole tyah) (Hole tybh)) + modify $ updateDefs $ + instantiateExpr x $ Semantic.Fst (Semantic.Hole th) + pure ([u], [e]) +solveGeneralElabProblem gam (Surface.Snd l t) x typ = do + tyah <- freshTyHole + tybh <- freshTyHole + let u = UnificationProblem l typ (Hole tybh) + th <- freshExprHole (Ty.Pair (Hole tyah) (Hole tybh)) + let e = General $ GeneralElabProblem gam t th (Ty.Pair (Hole tyah) (Hole tybh)) + modify $ updateDefs $ + instantiateExpr x $ Semantic.Snd (Semantic.Hole th) + pure ([u], [e]) +solveGeneralElabProblem gam (Surface.MkPair l a b) x typ = do + tyah <- freshTyHole + tybh <- freshTyHole + let u = UnificationProblem l typ (Ty.Pair (Hole tyah) (Hole tybh)) + ah <- freshExprHole (Hole tyah) + bh <- freshExprHole (Hole tybh) + let e1 = General $ GeneralElabProblem gam a ah (Hole tyah) + let e2 = General $ GeneralElabProblem gam b bh (Hole tybh) + modify $ updateDefs $ + instantiateExpr x $ + Semantic.MkPair + (Semantic.Hole ah) + 
(Semantic.Hole bh)
+ pure ([u], [e1, e2])
+solveGeneralElabProblem gam (Surface.Lambda l v scope) x typ = do
+ tyah <- freshTyHole
+ tybh <- freshTyHole
+ checkFresh gam v
+ let u = UnificationProblem l typ (Fun (Hole tyah) (Hole tybh))
+ scopeh <- freshExprHole (Hole tybh)
+ let e = General $ GeneralElabProblem (gam |> LambdaBinding v (Hole tyah)) scope scopeh (Hole tybh)
+ modify $ updateDefs $
+ instantiateExpr x $
+ Semantic.Lambda
+ v
+ (Semantic.Hole scopeh)
+ pure ([u], [e])
+solveGeneralElabProblem gam (Surface.Let l v rhs scope) x typ = do
+ tyah <- freshTyHole
+ tybh <- freshTyHole
+ checkFresh gam v
+ let u = UnificationProblem l typ (Hole tybh)
+ rhsh <- freshExprHole (Hole tyah)
+ scopeh <- freshExprHole (Hole tybh)
+ let e1 = General $ GeneralElabProblem gam rhs rhsh (Hole tyah)
+ let e2 = General $ GeneralElabProblem (gam |> LetBinding v (Semantic.Hole rhsh) (Hole tyah)) scope scopeh (Hole tybh)
+ modify $ updateDefs $
+ instantiateExpr x $
+ Semantic.Let
+ v
+ (Semantic.Hole rhsh)
+ (Semantic.Hole scopeh)
+ pure ([u], [e1, e2])
+solveGeneralElabProblem gam (mbBinaryArithmeticOp -> Just (loc, left, op, right)) x typ = do
+ tyah <- freshTyHole
+ tybh <- freshTyHole
+ lefth <- freshExprHole (Hole tyah) -- NOTE(fix): was `freshExprHole typ`; the operand hole's type must match e1's expected type, as in the mbBinaryRelation clause
+ righth <- freshExprHole (Hole tybh) -- NOTE(fix): was `freshExprHole typ`; must match e2's expected type
+ let e1 = General $ GeneralElabProblem gam left lefth (Hole tyah)
+ let e2 = General $ GeneralElabProblem gam right righth (Hole tybh)
+ let e3 = BinaryArithmeticOp $
+ BinaryArithmeticOpElabProblem gam loc (Semantic.Hole lefth) (Hole tyah) op (Semantic.Hole righth) (Hole tybh) x typ
+ pure ([], [e1, e2, e3])
+solveGeneralElabProblem gam (Surface.Variable l v) x typ | Just b <- find (\b -> Types.identifier b == v) gam = do
+ modify $ updateDefs $ instantiateExpr x $ Semantic.Variable v
+ pure ([UnificationProblem l typ (Types.ty b)], [])
+-- Assumes that all variables into the store (= metrics) have type (Timestamp -> Scalar)
+solveGeneralElabProblem _ (Surface.Variable l v) x typ = do
+ modify $ updateDefs $ instantiateExpr x $ 
Semantic.Variable v + pure ([UnificationProblem l typ (Fun Timestamp (InstantVector Scalar))], []) +solveGeneralElabProblem gam (Surface.Filter l f v) h hty = do + argTy <- freshTyHole + fh <- freshExprHole (Fun (Hole argTy) Bool) + vh <- freshExprHole (InstantVector (Hole argTy)) + modify $ updateDefs $ instantiateExpr h $ + Semantic.Filter (Semantic.Hole fh) (Semantic.Hole vh) + pure ([UnificationProblem l hty (InstantVector (Hole argTy))], + [General $ GeneralElabProblem gam f fh (Fun (Hole argTy) Bool), + General $ GeneralElabProblem gam v vh (InstantVector (Hole argTy)) + ] + ) +solveGeneralElabProblem gam (Surface.FilterByLabel loc ls v) h hty = do + argTy <- freshTyHole + vh <- freshExprHole (InstantVector (Hole argTy)) + modify $ updateDefs $ instantiateExpr h $ + Semantic.FilterByLabel ls (Semantic.Hole vh) + pure ([UnificationProblem loc hty (InstantVector (Hole argTy))], + [General $ GeneralElabProblem gam v vh (InstantVector (Hole argTy))] + ) +solveGeneralElabProblem gam (Surface.Map l f v) h hty = do + aTy <- freshTyHole + bTy <- freshTyHole + fh <- freshExprHole (Fun (Hole aTy) (Hole bTy)) + vh <- freshExprHole (InstantVector (Hole aTy)) + modify $ updateDefs $ instantiateExpr h $ + Semantic.Map (Semantic.Hole fh) (Semantic.Hole vh) + pure ([UnificationProblem l hty (InstantVector (Hole bTy))], + [General $ GeneralElabProblem gam f fh (Fun (Hole aTy) (Hole bTy)), + General $ GeneralElabProblem gam v vh (InstantVector (Hole aTy)) + ] + ) +solveGeneralElabProblem gam (Surface.Join l v u) h hty = do + aTy <- freshTyHole + bTy <- freshTyHole + vh <- freshExprHole (InstantVector (Hole aTy)) + uh <- freshExprHole (InstantVector (Hole bTy)) + modify $ updateDefs $ instantiateExpr h $ + Semantic.Join (Semantic.Hole vh) (Semantic.Hole uh) + pure ([UnificationProblem l hty (InstantVector (Ty.Pair (Hole aTy) (Hole bTy)))], + [General $ GeneralElabProblem gam v vh (InstantVector (Hole aTy)), + General $ GeneralElabProblem gam u uh (InstantVector (Hole bTy)) + ] 
+ ) +solveGeneralElabProblem gam (Surface.Unless l v u) h hty = do + aTy <- freshTyHole + bTy <- freshTyHole + vh <- freshExprHole (InstantVector (Hole aTy)) + uh <- freshExprHole (InstantVector (Hole bTy)) + modify $ updateDefs $ instantiateExpr h $ + Semantic.Unless (Semantic.Hole vh) (Semantic.Hole uh) + pure ([UnificationProblem l hty (InstantVector (Hole aTy))], + [General $ GeneralElabProblem gam v vh (InstantVector (Hole aTy)), + General $ GeneralElabProblem gam u uh (InstantVector (Hole bTy)) + ] + ) +solveGeneralElabProblem gam (Surface.App l f e) h hty = do + aTy <- freshTyHole + bTy <- freshTyHole + fh <- freshExprHole (Fun (Hole aTy) (Hole bTy)) + eh <- freshExprHole (Hole aTy) + modify $ updateDefs $ instantiateExpr h $ + Semantic.Application (Semantic.Hole fh) (Semantic.Hole eh) + pure ([UnificationProblem l hty (Hole bTy)], + [General $ GeneralElabProblem gam f fh (Fun (Hole aTy) (Hole bTy)), + General $ GeneralElabProblem gam e eh (Hole aTy) + ] + ) +solveGeneralElabProblem gam (Surface.ToScalar l t) h hty = do + tTy <- freshTyHole + th <- freshExprHole (Hole tTy) + pure ([UnificationProblem l hty Scalar], + [ + General $ GeneralElabProblem gam t th (Hole tTy) + , + ToScalar $ ToScalarElabProblem gam l (Semantic.Hole th) (Hole tTy) h + ]) +solveGeneralElabProblem _ s _ _ = throwError $ + "Do not know how to elaborate: " <> showT s + +solveElabProblem :: ElabProblem -> ElabM (Maybe ([UnificationProblem], [ElabProblem])) +solveElabProblem (General (GeneralElabProblem gam s h typ)) = do + defs <- defs <$> get + let typ' = resolveTy defs typ + let gam' = resolveContext defs gam + Just <$> solveGeneralElabProblem gam' s h typ' +solveElabProblem (BinaryArithmeticOp (BinaryArithmeticOpElabProblem gam loc lhs lhsTy op rhs rhsTy hole holeTy)) = do + defs <- defs <$> get + let gam' = resolveContext defs gam + let lhsTy' = resolveTy defs lhsTy + let rhsTy' = resolveTy defs rhsTy + let holeTy' = resolveTy defs holeTy + solveBinaryArithmeticOpElabProblem gam' 
loc lhs lhsTy' op rhs rhsTy' hole holeTy' +solveElabProblem (BinaryRelation (BinaryRelationElabProblem gam loc lhs lhsTy rel rhs rhsTy hole holeTy)) = do + defs <- defs <$> get + let gam' = resolveContext defs gam + let lhsTy' = resolveTy defs lhsTy + let rhsTy' = resolveTy defs rhsTy + let holeTy' = resolveTy defs holeTy + solveBinaryRelationElabProblem gam' loc lhs lhsTy' rel rhs rhsTy' hole holeTy' +solveElabProblem (ToScalar (ToScalarElabProblem gam loc t tTy hole)) = do + defs <- defs <$> get + let gam' = resolveContext defs gam + let tTy' = resolveTy defs tTy + solveToScalarElabProblem gam' loc t tTy' hole + +solveH :: Bool -> Seq ElabProblem -> Seq ElabProblem -> ElabM (Bool, Seq ElabProblem) +solveH progress stuck Empty = pure (progress, stuck) +solveH progress stuck (p :<| ps) = do + solveElabProblem p >>= \case + Nothing -> solveH progress (stuck |> p) ps + Just (us, es) -> runUnifyM (Unify.solve us) >> solveH True stuck (fromList es >< ps) + +solve :: Seq ElabProblem -> ElabM () +solve Empty = pure () +solve problems = + solveH False Empty problems >>= \case + (True, stuck) -> solve stuck + (False, stuck) -> do + defs <- defs <$> get + let toShow = fmap (asText . evalElabProblem defs) stuck + throwError $ "Can't solve elaboration problems:\n" <> + Text.unlines (Foldable.toList toShow) + +-- | Given a surface query `Surface.Expr` elaborate it into a regular "semantic" query `Semantic.Expr`. 
+elab :: Surface.Expr -> ElabM Semantic.Expr +elab expr = do + typ <- freshTyHole + t <- freshExprHole (Hole typ) + solve $ Seq.singleton (General $ GeneralElabProblem Seq.Empty expr t (Hole typ)) + ds <- getDefs <$> get + pure $ resolveExpr' ds (Semantic.Hole t) diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Import/PlainCBOR.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Import/PlainCBOR.hs new file mode 100644 index 00000000000..87a57291d00 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Import/PlainCBOR.hs @@ -0,0 +1,58 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE TypeSynonymInstances #-} + +{-# OPTIONS_GHC -Wno-type-defaults -Wno-orphans #-} + +module Cardano.Timeseries.Import.PlainCBOR where + +import Cardano.Timeseries.Domain.Instant (Instant (..)) +import Cardano.Timeseries.Store.Flat (Flat, Point (..)) + +import Codec.Serialise +import Data.Map.Strict as Map (Map) +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set +import Data.Text (Text, unpack) +import Data.Time.Clock.POSIX (POSIXTime) +import GHC.Generics (Generic) + + +data NumericValue = + NVInt Int + | NVDouble Double + deriving (Generic, Show, Serialise) + + +data Snapshot = Snapshot + { singletonLabel :: Text + , timeStamp :: POSIXTime + , scrape :: Map Text NumericValue + } + deriving (Generic, Serialise) + +instance Show Snapshot where + show (Snapshot l t s) = "Snapshot{" ++ unpack l ++ "} @ " ++ show t ++ ", entries: " ++ show s + + +instance Serialise POSIXTime where + encode = encode . toInteger . 
floor + decode = fromInteger <$> decode + + +readFileSnapshots :: FilePath -> IO [Snapshot] +readFileSnapshots = readFileDeserialise + +numericValueToDouble :: NumericValue -> Double +numericValueToDouble (NVInt x) = fromIntegral x +numericValueToDouble (NVDouble x) = x + +scrapeDatapointToPoint :: Text -> POSIXTime -> Text -> NumericValue -> Point Double +scrapeDatapointToPoint node t metric v = + Point metric (Instant (Set.fromList [("node", node)]) (floor (t * 1000)) (numericValueToDouble v)) + +snapshotToFlatStore :: Snapshot -> Flat Double +snapshotToFlatStore (Snapshot l t s) = Map.foldlWithKey' (\acc k v -> scrapeDatapointToPoint l t k v : acc) [] s + +snapshotsToFlatStore :: [Snapshot] -> Flat Double +snapshotsToFlatStore = (>>= snapshotToFlatStore) diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interface.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interface.hs new file mode 100644 index 00000000000..2f88f6d845a --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interface.hs @@ -0,0 +1,41 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE GADTs #-} +module Cardano.Timeseries.Interface(ExecutionError(..), execute) where + +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Domain.Types (Timestamp) +import Cardano.Timeseries.Elab (elab, initialSt) +import Cardano.Timeseries.Interp (interp) +import Cardano.Timeseries.Interp.Config (Config (..)) +import qualified Cardano.Timeseries.Interp.Types as QueryError +import Cardano.Timeseries.Interp.Value (Value) +import Cardano.Timeseries.Store +import qualified Cardano.Timeseries.Surface.Expr.Parser as Surface.Parser + +import Control.Monad.Except (runExceptT) +import Control.Monad.State.Strict (evalState) +import Data.Bifunctor (first) +import Data.Text (Text) +import qualified Data.Text as Text +import Text.Megaparsec hiding (count) +import Text.Megaparsec.Char (space) + +data ExecutionError where + ParsingError :: {message :: Text} -> ExecutionError + 
ElabError :: {message :: Text} -> ExecutionError + InterpError :: {message :: Text} -> ExecutionError deriving (Show) + + +instance AsText ExecutionError where + asText ParsingError{message} = "Parsing error: " <> message + asText ElabError{message} = "Elaboration error: " <> message + asText InterpError{message} = "Interpretation error: " <> message + +execute :: Store s Double => s -> Config -> Timestamp -> Text -> Either ExecutionError Value +execute store interpCfg now stringQuery = do + surfaceQuery <- first (ParsingError . Text.pack . errorBundlePretty) $ + parse (Surface.Parser.expr <* space <* eof) "input" stringQuery + query <- first ElabError $ + evalState (runExceptT (elab surfaceQuery)) initialSt + first (InterpError . QueryError.message) $ + evalState (runExceptT $ interp interpCfg store mempty query now) 0 diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp.hs new file mode 100644 index 00000000000..0b1a2161c7f --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp.hs @@ -0,0 +1,349 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE ViewPatterns #-} + +module Cardano.Timeseries.Interp(interp) where +import Cardano.Timeseries.AsText (showT) +import Cardano.Timeseries.Domain.Identifier (Identifier (..)) +import Cardano.Timeseries.Domain.Instant (Instant (Instant), InstantVector, share) +import qualified Cardano.Timeseries.Domain.Instant as Domain +import qualified Cardano.Timeseries.Domain.Instant as Instant +import Cardano.Timeseries.Domain.Interval +import Cardano.Timeseries.Domain.Timeseries (TimeseriesVector, eachNewest, eachOldest, + superseries, transpose) +import Cardano.Timeseries.Domain.Types (Label, Labelled, MetricIdentifier, Timestamp) +import Cardano.Timeseries.Interp.Config (Config (..)) +import 
Cardano.Timeseries.Interp.Expect +import Cardano.Timeseries.Interp.Statistics +import Cardano.Timeseries.Interp.Types +import Cardano.Timeseries.Interp.Value as Value +import Cardano.Timeseries.Query.BinaryArithmeticOp (BinaryArithmeticOp) +import qualified Cardano.Timeseries.Query.BinaryArithmeticOp as BinaryArithmeticOp +import Cardano.Timeseries.Query.BinaryRelation (BinaryRelation, embedScalar, + mbBinaryRelationInstantVector, mbBinaryRelationScalar) +import qualified Cardano.Timeseries.Query.BinaryRelation as BinaryRelation +import Cardano.Timeseries.Query.Expr as Expr +import Cardano.Timeseries.Store (Store (metrics)) +import qualified Cardano.Timeseries.Store as Store +import Cardano.Timeseries.Util (maybeToEither) + +import Prelude hiding (max, min, pred) + +import Control.Monad (filterM, (<=<)) +import Control.Monad.Except (liftEither) +import Control.Monad.State (get, put) +import Control.Monad.Trans (lift) +import Data.Function (on) +import Data.List (find, groupBy) +import qualified Data.List as List +import Data.Map.Strict (Map) +import qualified Data.Map.Strict as Map +import Data.Maybe (fromMaybe) +import Data.Set (Set, isSubsetOf, member) +import qualified Data.Set as Set +import Data.Text (Text) +import Data.Word (Word64) + +import Statistics.Function (minMax) +import Statistics.Quantile (cadpw, quantile) +import Statistics.Sample (mean) +import qualified Data.Text as Text + + +interpJoin :: (a -> b -> c) -> InstantVector a -> InstantVector b -> Either InterpError (InstantVector c) +interpJoin _ [] _ = Right [] +interpJoin f (inst@(Domain.Instant ls t v) : xs) other = do + Domain.Instant _ _ v' <- maybeToEither (InterpError $ "No matching label: " <> showT ls) $ find (share inst) other + rest <- interpJoin f xs other + Right (Domain.Instant ls t (f v v') : rest) + +interpRange :: FunctionValue -> Interval -> Word64 -> InterpM (TimeseriesVector Value) +interpRange f Interval{..} rate = transpose <$> sample start end where + + sample :: Timestamp -> 
Timestamp -> InterpM [InstantVector Value] + sample t max | t > max = pure [] + sample t max = (:) <$> (expectInstantVector <=< f) (Value.Timestamp t) <*> sample (t + rate) max + +interpLabelInst :: LabelConstraint -> Labelled (Bool, Text) +interpLabelInst (LabelConstraintEq (k, v)) = (k, (Prelude.True, v)) +interpLabelInst (LabelConstraintNotEq (k, v)) = (k, (Prelude.False, v)) + +interpLabelInsts :: Set LabelConstraint -> (Set (Labelled Text), Set (Labelled Text)) +interpLabelInsts ls = + let (sub, notsub) = List.partition cond $ fmap interpLabelInst (Set.toList ls) in + (Set.fromList $ fmap extract sub, Set.fromList $ fmap extract notsub) where + cond (_, (b, _)) = b + extract (x, (_, y)) = (x, y) + +interpVariable :: Store s Double => s -> MetricIdentifier -> Value -> InterpM Value +interpVariable store x t_ = do + t <- expectTimestamp t_ + pure (Value.InstantVector (fmap (fmap Value.Scalar) (Store.evaluate store x t))) + +interpQuantileBy :: Set Label -> Double -> InstantVector Double -> Timestamp -> InterpM (InstantVector Double) +interpQuantileBy ls k vs now = + let groups = groupBy (on (==) (superseries ls . 
(.labels))) vs + quantiles = fmap (\g -> (superseries ls (head g).labels, quantile cadpw (floor (k * 100)) 100 (Instant.toVector g))) + groups in + pure $ fmap (\(idx, v) -> Instant idx now v) quantiles + +interpFilter :: FunctionValue -> InstantVector Value -> InterpM (InstantVector Value) +interpFilter f = filterM pred where + pred :: Instant Value -> InterpM Bool + pred inst = (expectBoolean <=< f) inst.value + +interpMap :: FunctionValue -> InstantVector Value -> InterpM (InstantVector Value) +interpMap f = traverse (traverse f) + +interpRate :: TimeseriesVector Double -> InterpM (InstantVector Double) +interpRate v = do + min <- liftEither $ maybeToEither (InterpError "Can't compute rate") (eachOldest v) + max <- liftEither $ maybeToEither (InterpError "Can't compute rate") (eachNewest v) + pure $ zipWith compute min max where + + compute :: Instant Double -> Instant Double -> Instant Double + compute min max = + let x = (max.value - min.value) / fromIntegral (max.timestamp - min.timestamp) in + Instant min.labels max.timestamp x + +interpIncrease :: TimeseriesVector Double -> InterpM (InstantVector Double) +interpIncrease v = liftEither $ do + min <- maybeToEither (InterpError "Can't compute rate") (eachOldest v) + max <- maybeToEither (InterpError "Can't compute rate") (eachNewest v) + Right $ zipWith compute min max where + + compute :: Instant Double -> Instant Double -> Instant Double + compute min max = + let x = max.value - min.value in + Instant min.labels max.timestamp x + +-- | (v `op` s) ≡ map (\x -> x `op` s) v +-- | where v : InstantVector Scalar +-- | s : Scalar +interpBinaryArithmeticOp :: Store s Double + => Config + -> s + -> Map Identifier Value + -> Expr + -> BinaryArithmeticOp + -> Expr + -> Timestamp + -> InterpM Value +interpBinaryArithmeticOp cfg store env v op k now = do + nextVarIdx <- lift get + lift (put (1 + nextVarIdx)) + interp cfg store env + (Map + ( + Lambda + (Machine nextVarIdx) + (BinaryArithmeticOp.embedScalar op (Variable 
(Machine nextVarIdx)) k) + ) + v + ) + now + +-- | (v `R` s) ≡ filter (\x -> x `R` s) v +-- | where v : InstantVector Scalar +-- | s : Scalar +interpFilterBinaryRelation :: Store s Double + => Config + -> s + -> Map Identifier Value + -> Expr + -> BinaryRelation + -> Expr + -> Timestamp + -> InterpM Value +interpFilterBinaryRelation cfg store env v rel k now = do + nextVarIdx <- lift get + lift (put (1 + nextVarIdx)) + interp cfg store env + (Filter + ( + Lambda + (Machine nextVarIdx) + (embedScalar rel (Variable (Machine nextVarIdx)) k) + ) + v + ) + now + +-- | Given a metric store, an assignment of values to local variables, a query expression and a timestamp "now", +-- interpret the `Expr` into a `Value`. +interp :: Store s Double => Config -> s -> Map Identifier Value -> Expr -> Timestamp -> InterpM Value +interp _ store _ Expr.Metrics _ = do + pure $ Value.Text $ Text.intercalate ", " (Set.toList $ metrics store) +interp _ _ _ (Expr.Number x) _ = do + pure (Value.Scalar x) +interp _ store env (Expr.Variable x) _ = + case Map.lookup x env of + Just v -> pure v + Nothing -> + case x of + User u | member u (metrics store) -> + pure $ Value.Function (interpVariable store u) + _ -> + throwInterpError $ "Undefined variable: " <> showT x +interp _ _ _ Now now = pure (Timestamp (fromIntegral now)) +interp _ _ _ Epoch _ = pure (Timestamp 0) +interp cfg store env (Lambda x body) now = pure $ Value.Function $ \v -> + interp cfg store (Map.insert x v env) body now +interp cfg store env (Let x rhs body) now = do + v <- interp cfg store env rhs now + interp cfg store (Map.insert x v env) body now +interp cfg store env (FastForward t_ d_) now = do + t <- interp cfg store env t_ now >>= expectTimestamp + d <- interp cfg store env d_ now >>= expectDuration + pure (Value.Timestamp (t + d)) +interp cfg store env (FilterByLabel cs s_) now = do + s <- interp cfg store env s_ now >>= expectInstantVector + let (mustBe, mustNotBe) = interpLabelInsts cs + pure $ + Value.InstantVector 
$ + flip filter s $ \i -> + (&&) + (mustBe `isSubsetOf` i.labels) + (Set.null (mustNotBe `Set.intersection` i.labels)) +interp cfg store env (Unless u_ v_) now = do + u <- interp cfg store env u_ now >>= expectInstantVector + v <- interp cfg store env v_ now >>= expectInstantVector + let vls = Set.fromList (map (.labels) v) + pure (Value.InstantVector (filter (\i -> not (member i.labels vls)) u)) +interp cfg store env (Filter f_ t_) now = do + f <- interp cfg store env f_ now >>= expectFunction + t <- interp cfg store env t_ now >>= expectInstantVector + Value.InstantVector <$> interpFilter f t +interp cfg store env (Join a_ b_) now = do + a <- interp cfg store env a_ now >>= expectInstantVector + b <- interp cfg store env b_ now >>= expectInstantVector + Value.InstantVector <$> liftEither (interpJoin Value.Pair a b) +interp cfg store env (Map f_ x_) now = do + f <- interp cfg store env f_ now >>= expectFunction + x <- interp cfg store env x_ now >>= expectInstantVector + Value.InstantVector <$> interpMap f x +interp cfg store env (Range s_ a_ b_ r_) now = do + s <- interp cfg store env s_ now >>= expectFunction + a <- interp cfg store env a_ now >>= expectTimestamp + b <- interp cfg store env b_ now >>= expectTimestamp + r <- traverse (\r' -> interp cfg store env r' now >>= expectDuration) r_ + RangeVector <$> interpRange s (Interval a b) (fromMaybe cfg.defaultRangeSamplingRateMillis r) +interp cfg store env (Rewind t_ d_) now = do + t <- interp cfg store env t_ now >>= expectTimestamp + d <- interp cfg store env d_ now >>= expectDuration + pure (Timestamp (t - d)) +interp cfg store env (BoolToScalar t_) now = do + t <- interp cfg store env t_ now >>= expectBoolean + pure (Scalar (if t then 1 else 0)) +interp cfg store env (InstantVectorToScalar t_) now = do + t <- interp cfg store env t_ now >>= expectInstantVectorBool + pure (Value.InstantVector (fmap (\x -> Value.Scalar (if x then 1.0 else 0.0)) <$> t)) +interp cfg store env (TimestampToScalar t_) now = do + t 
<- interp cfg store env t_ now >>= expectTimestamp + pure (Scalar (fromIntegral t)) +interp cfg store env (DurationToScalar t_) now = do + t <- interp cfg store env t_ now >>= expectDuration + pure (Scalar (fromIntegral t)) +interp _ _ _ (Milliseconds t) _ = pure $ Duration t +interp _ _ _ (Seconds t) _ = pure $ Duration (1000 * t) +interp _ _ _ (Minutes t) _ = pure $ Duration (60 * 1000 * t) +interp _ _ _ (Hours t) _ = pure $ Duration (60 * 60 * 1000 * t) +interp cfg store env (BinaryArithmeticOp.mbBinaryArithmeticOpInstantVectorScalar -> Just (v, op, k)) now = do + interpBinaryArithmeticOp cfg store env v op k now +interp cfg store env (Quantile k_ expr) now = do + k <- interp cfg store env k_ now >>= expectScalar + v <- interp cfg store env expr now >>= expectInstantVectorScalar + pure $ Value.Scalar $ quantile cadpw (floor (k * 100)) 100 (Instant.toVector v) +interp cfg store env (QuantileBy ls k_ expr) now = do + k <- interp cfg store env k_ now >>= expectScalar + v <- interp cfg store env expr now >>= expectInstantVectorScalar + Value.InstantVector . 
fmap (fmap Value.Scalar) <$> interpQuantileBy ls k v now +interp cfg store env (QuantileOverTime k_ expr) now = do + k <- interp cfg store env k_ now >>= expectScalar + v <- interp cfg store env expr now >>= expectRangeVectorScalar + pure $ Value.InstantVector (fmap Value.Scalar <$> quantileRangeVector k v) +interp cfg store env (Rate r_) now = do + r <- interp cfg store env r_ now >>= expectRangeVectorScalar + -- TODO: PromQL's rate() performs linear regression to extrapolate the samples to the bounds + r' <- interpRate r + pure (Value.InstantVector (fmap (fmap Value.Scalar) r')) +interp cfg store env (Increase r_) now = do + r <- interp cfg store env r_ now >>= expectRangeVectorScalar + -- TODO: PromQL's increase() performs linear regression to extrapolate the samples to the bounds + r' <- interpIncrease r + pure (Value.InstantVector (fmap (fmap Value.Scalar) r')) +interp cfg store env (Avg expr) now = do + v <- interp cfg store env expr now >>= expectInstantVectorScalar + pure $ Value.Scalar $ mean (Instant.toVector v) +interp cfg store env (Max expr) now = do + v <- interp cfg store env expr now >>= expectInstantVectorScalar + pure $ Value.Scalar $ snd $ minMax (Instant.toVector v) +interp cfg store env (Min expr) now = do + v <- interp cfg store env expr now >>= expectInstantVectorScalar + pure $ Value.Scalar $ fst $ minMax (Instant.toVector v) +interp cfg store env (AvgOverTime expr) now = do + v <- interp cfg store env expr now >>= expectRangeVectorScalar + pure $ Value.InstantVector (fmap Value.Scalar <$> avgOverTime now v) +interp cfg store env (SumOverTime expr) now = do + v <- interp cfg store env expr now >>= expectRangeVectorScalar + pure $ Value.InstantVector (fmap Value.Scalar <$> sumOverTime now v) +interp cfg store env (MkPair a b) now = do + va <- interp cfg store env a now + vb <- interp cfg store env b now + pure $ Value.Pair va vb +interp cfg store env (Fst t) now = do + (a, _) <- interp cfg store env t now >>= expectPair + pure a +interp cfg 
store env (Snd t) now = do + (_, b) <- interp cfg store env t now >>= expectPair + pure b +interp _ _ _ Expr.True _ = do + pure Truth +interp _ _ _ Expr.False _ = do + pure Falsity +interp cfg store env (Expr.And a b) now = do + va <- interp cfg store env a now >>= expectBoolean + vb <- interp cfg store env b now >>= expectBoolean + pure (fromBool (va && vb)) +interp cfg store env (Expr.Or a b) now = do + va <- interp cfg store env a now >>= expectBoolean + vb <- interp cfg store env b now >>= expectBoolean + pure (fromBool (va || vb)) +interp cfg store env (Expr.Not t_) now = do + vt <- interp cfg store env t_ now >>= expectBoolean + pure (fromBool (not vt)) +interp cfg store env (Expr.EqBool a b) now = do + va <- interp cfg store env a now >>= expectBoolean + vb <- interp cfg store env b now >>= expectBoolean + pure (fromBool (va == vb)) +interp cfg store env (Expr.NotEqBool a b) now = do + va <- interp cfg store env a now >>= expectBoolean + vb <- interp cfg store env b now >>= expectBoolean + pure (fromBool (va /= vb)) +interp cfg store env (mbBinaryRelationScalar -> Just (a, rel, b)) now = do + va <- interp cfg store env a now >>= expectScalar + vb <- interp cfg store env b now >>= expectScalar + pure (fromBool (BinaryRelation.materializeScalar rel va vb)) +interp cfg store env (BinaryArithmeticOp.mbBinaryArithmeticOpScalar -> Just (a, op, b)) now = do + va <- interp cfg store env a now >>= expectScalar + vb <- interp cfg store env b now >>= expectScalar + pure (Value.Scalar (BinaryArithmeticOp.materializeScalar op va vb)) +interp cfg store env (Expr.Abs x_) now = do + x <- interp cfg store env x_ now >>= expectScalar + pure (Value.Scalar (abs x)) +interp cfg store env (Expr.RoundScalar x_) now = do + x <- interp cfg store env x_ now >>= expectScalar + pure (Value.Scalar (fromIntegral (round x :: Int))) +interp cfg store env (Application f_ e_) now = do + f <- interp cfg store env f_ now >>= expectFunction + e <- interp cfg store env e_ now + f e +interp cfg 
store env (Expr.AddDuration a_ b_) now = do + a <- interp cfg store env a_ now >>= expectDuration + b <- interp cfg store env b_ now >>= expectDuration + pure (Value.Duration (a + b)) +interp cfg store env (mbBinaryRelationInstantVector -> Just (v, rel, k)) now = + interpFilterBinaryRelation cfg store env v rel k now +interp _ _ _ expr _ = throwInterpError $ "Can't interpret expression: " <> showT expr diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Config.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Config.hs new file mode 100644 index 00000000000..2282c88314a --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Config.hs @@ -0,0 +1,8 @@ +{- HLINT ignore "Use newtype instead of data" -} + +module Cardano.Timeseries.Interp.Config where +import Data.Word (Word64) + +data Config = Config { + defaultRangeSamplingRateMillis :: Word64 +} deriving (Show, Eq) diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Expect.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Expect.hs new file mode 100644 index 00000000000..19bbee2547e --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Expect.hs @@ -0,0 +1,72 @@ +module Cardano.Timeseries.Interp.Expect where + +import Cardano.Timeseries.AsText (showT) +import Cardano.Timeseries.Domain.Instant (Instant, InstantVector) +import Cardano.Timeseries.Domain.Timeseries (Timeseries, TimeseriesVector) +import Cardano.Timeseries.Interp.Types +import Cardano.Timeseries.Interp.Value as Value + +import Data.Word (Word64) + +expectInstantVector :: Value -> InterpM (InstantVector Value) +expectInstantVector (Value.InstantVector v) = pure v +expectInstantVector _ = throwInterpError "Unexpected expression type: expected an instant vector" + +expectRangeVector :: Value -> InterpM (TimeseriesVector Value) +expectRangeVector (Value.RangeVector v) = pure v +expectRangeVector _ = throwInterpError "Unexpected expression type: 
expected a range vector" + +expectTimeseriesScalar :: Timeseries Value -> InterpM (Timeseries Double) +expectTimeseriesScalar = traverse expectScalar + +expectRangeVectorScalar :: Value -> InterpM (TimeseriesVector Double) +expectRangeVectorScalar v = expectRangeVector v >>= traverse expectTimeseriesScalar + +expectInstantScalar :: Instant Value -> InterpM (Instant Double) +expectInstantScalar = traverse expectScalar + +expectInstantBool :: Instant Value -> InterpM (Instant Bool) +expectInstantBool = traverse expectBool + +expectInstantVectorScalar :: Value -> InterpM (InstantVector Double) +expectInstantVectorScalar v = expectInstantVector v >>= traverse expectInstantScalar + +expectInstantVectorBool :: Value -> InterpM (InstantVector Bool) +expectInstantVectorBool v = expectInstantVector v >>= traverse expectInstantBool + +expectPair :: Value -> InterpM (Value, Value) +expectPair (Value.Pair a b) = pure (a, b) +expectPair _ = throwInterpError "Unexpected expression type: expected a pair" + +expectScalar :: Value -> InterpM Double +expectScalar (Value.Scalar x) = pure x +expectScalar _ = throwInterpError "Unexpected expression type: expected a scalar" + +expectBool :: Value -> InterpM Bool +expectBool Value.Truth = pure Prelude.True +expectBool Value.Falsity = pure Prelude.False +expectBool _ = throwInterpError "Unexpected expression type: expected a bool" + +expectBoolean :: Value -> InterpM Bool +expectBoolean Truth = pure Prelude.True +expectBoolean Falsity = pure Prelude.False +expectBoolean _ = throwInterpError "Unexpected expression type: expected a boolean" + +expectDuration :: Value -> InterpM Word64 +expectDuration (Value.Duration x) = pure x +expectDuration _ = throwInterpError "Unexpected expression type: expected a duration" + +expectTimestamp :: Value -> InterpM Word64 +expectTimestamp (Value.Timestamp x) = pure x +expectTimestamp _ = throwInterpError "Unexpected expression type: expected a timestamp" + +expectFunction :: Value -> InterpM 
FunctionValue
+expectFunction (Value.Function f) = pure f
+expectFunction _ = throwInterpError "Unexpected expression type: expected a function"
+
+expectWord64 :: Double -> InterpM Word64
+expectWord64 x
+  | snd pf == 0.0 = pure $ fst pf
+  | otherwise = throwInterpError $ "Expected a whole number, got: " <> showT x
+  where
+    pf = properFraction x
diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Statistics.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Statistics.hs
new file mode 100644
index 00000000000..a15757b0820
--- /dev/null
+++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Statistics.hs
@@ -0,0 +1,35 @@
+{-# LANGUAGE RecordWildCards #-}
+module Cardano.Timeseries.Interp.Statistics where
+import Cardano.Timeseries.Domain.Instant
+import qualified Cardano.Timeseries.Domain.Instant as Domain
+import qualified Cardano.Timeseries.Domain.Instant as Instant
+import Cardano.Timeseries.Domain.Timeseries
+import qualified Cardano.Timeseries.Domain.Timeseries as Timeseries
+import Cardano.Timeseries.Domain.Types
+
+import Data.Maybe (fromJust)
+
+import Statistics.Quantile (cadpw, quantile)
+import Statistics.Sample (mean)
+
+-- | Per-series arithmetic mean of all samples in the range, reported at timestamp 'at'.
+avgOverTime :: Timestamp -> TimeseriesVector Double -> InstantVector Double
+avgOverTime at = fmap compute where
+  compute :: Timeseries Double -> Instant Double
+  compute series = Domain.Instant (Timeseries.labels series) at (mean $ Timeseries.toVector series)
+
+-- | Per-series sum of all samples in the range, reported at timestamp 'at'.
+sumOverTime :: Timestamp -> TimeseriesVector Double -> InstantVector Double
+sumOverTime at = fmap compute where
+  compute :: Timeseries Double -> Instant Double
+  compute series = Domain.Instant (Timeseries.labels series) at (sum $ Timeseries.toVector series)
+
+-- | k-quantile of one series, stamped at the series' newest sample time; k presumably in [0, 1] -- TODO confirm.
+quantileTimeseries :: Double -> Timeseries Double -> Instant Double
+quantileTimeseries k v@Timeseries{..} =
+  let value = quantile cadpw (floor (k * 100)) 100 (Timeseries.toVector v) in
+  -- NOTE(review): 'fromJust' is partial -- crashes on a series with no points; confirm callers guarantee non-emptiness.
+  Instant labels (Instant.timestamp $ fromJust (Timeseries.newest v)) value
+
+quantileRangeVector :: Double -> TimeseriesVector Double -> InstantVector Double +quantileRangeVector k = map (quantileTimeseries k) diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Types.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Types.hs new file mode 100644 index 00000000000..70b97629f77 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Types.hs @@ -0,0 +1,16 @@ +module Cardano.Timeseries.Interp.Types where +import Control.Monad.Except (ExceptT, throwError) +import Control.Monad.State.Strict (State) +import Cardano.Timeseries.AsText +import Data.Text (Text) + +newtype InterpError = + InterpError { message :: Text } + +instance AsText InterpError where + asText InterpError{message} = "Interpretation error: " <> message + +type InterpM a = ExceptT InterpError (State Int) a + +throwInterpError :: Text -> InterpM a +throwInterpError = throwError . InterpError diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Value.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Value.hs new file mode 100644 index 00000000000..10661368366 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Interp/Value.hs @@ -0,0 +1,57 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GADTs #-} + +module Cardano.Timeseries.Interp.Value(Value(..), FunctionValue, fromBool) where + +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Domain.Instant +import Cardano.Timeseries.Domain.Timeseries (TimeseriesVector) +import Cardano.Timeseries.Interp.Types (InterpM) + +import Control.DeepSeq (NFData) +import Data.Text (unpack, Text) +import Data.Word (Word64) +import GHC.Generics (Generic) + +type FunctionValue = Value -> InterpM Value + +-- | A model of values that queries interpret into. +data Value where + -- | A scalar. + Scalar :: Double -> Value + -- | A range vector. + RangeVector :: TimeseriesVector Value -> Value + -- | An instant vector. 
+ InstantVector :: InstantVector Value -> Value + -- | A pair. + Pair :: Value -> Value -> Value + -- | Truth. + Truth :: Value + -- | Falsity. + Falsity :: Value + -- | Duration (milliseconds) + Duration :: Word64 -> Value + -- | Timestamp (milliseconds since epoch) + Timestamp :: Word64 -> Value + -- | Function + Function :: FunctionValue -> Value + -- | Text + Text :: Text -> Value deriving Generic + +instance NFData Value + +instance Show Value where + show (Scalar x) = show x + show (RangeVector x) = unpack (asText x) + show (InstantVector x) = unpack (asText x) + show (Pair x y) = "(" <> show x <> ", " <> show y <> ")" + show Truth = "true" + show Falsity = "false" + show (Duration d) = show d <> "ms" + show (Timestamp t) = show t + show (Function _) = "" + show (Text t) = show t + +fromBool :: Bool -> Value +fromBool Prelude.True = Truth +fromBool Prelude.False = Falsity diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Query/BinaryArithmeticOp.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Query/BinaryArithmeticOp.hs new file mode 100644 index 00000000000..9cd3383d41e --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Query/BinaryArithmeticOp.hs @@ -0,0 +1,47 @@ +module Cardano.Timeseries.Query.BinaryArithmeticOp(BinaryArithmeticOp(..), + embedScalar, embedInstantVectorScalar, + mbBinaryArithmeticOpScalar, mbBinaryArithmeticOpInstantVectorScalar, materializeScalar) where + +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Query.Expr (Expr (..)) + +data BinaryArithmeticOp = Add | Sub | Mul | Div deriving (Show, Eq, Ord) + +instance AsText BinaryArithmeticOp where + asText = \case + Add -> "+" + Sub -> "-" + Mul -> "*" + Div -> "/" + +embedScalar :: BinaryArithmeticOp -> Expr -> Expr -> Expr +embedScalar Add = AddScalar +embedScalar Sub = SubScalar +embedScalar Mul = MulScalar +embedScalar Div = DivScalar + +embedInstantVectorScalar :: BinaryArithmeticOp -> Expr -> Expr -> Expr +embedInstantVectorScalar 
Add = AddInstantVectorScalar +embedInstantVectorScalar Sub = SubInstantVectorScalar +embedInstantVectorScalar Mul = MulInstantVectorScalar +embedInstantVectorScalar Div = DivInstantVectorScalar + +mbBinaryArithmeticOpInstantVectorScalar :: Expr -> Maybe (Expr, BinaryArithmeticOp, Expr) +mbBinaryArithmeticOpInstantVectorScalar (AddInstantVectorScalar a b) = Just (a, Add, b) +mbBinaryArithmeticOpInstantVectorScalar (SubInstantVectorScalar a b) = Just (a, Sub, b) +mbBinaryArithmeticOpInstantVectorScalar (MulInstantVectorScalar a b) = Just (a, Mul, b) +mbBinaryArithmeticOpInstantVectorScalar (DivInstantVectorScalar a b) = Just (a, Div, b) +mbBinaryArithmeticOpInstantVectorScalar _ = Nothing + +mbBinaryArithmeticOpScalar :: Expr -> Maybe (Expr, BinaryArithmeticOp, Expr) +mbBinaryArithmeticOpScalar (AddScalar a b) = Just (a, Add, b) +mbBinaryArithmeticOpScalar (SubScalar a b) = Just (a, Sub, b) +mbBinaryArithmeticOpScalar (MulScalar a b) = Just (a, Mul, b) +mbBinaryArithmeticOpScalar (DivScalar a b) = Just (a, Div, b) +mbBinaryArithmeticOpScalar _ = Nothing + +materializeScalar :: BinaryArithmeticOp -> Double -> Double -> Double +materializeScalar Add = (+) +materializeScalar Sub = (-) +materializeScalar Mul = (*) +materializeScalar Div = (/) diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Query/BinaryRelation.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Query/BinaryRelation.hs new file mode 100644 index 00000000000..c6e24074bd6 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Query/BinaryRelation.hs @@ -0,0 +1,79 @@ +module Cardano.Timeseries.Query.BinaryRelation( + BinaryRelation(..), + embedScalar, + embedInstantVectorScalar, + mbBinaryRelationInstantVector, + mbBinaryRelationScalar, + swapInstantVectorScalar, + materializeScalar) where + +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Query.Expr (Expr (..)) + +-- | A datatype used to carve out a subset of `Function` that represents binary relations. 
+data BinaryRelation = Eq | Lt | Lte | Gt | Gte | NotEq deriving (Show, Eq, Ord) + +instance AsText BinaryRelation where + asText = \case + Eq -> "==" + Lt -> "<" + Lte -> "<=" + Gt -> ">" + Gte -> ">=" + NotEq -> "!=" + +embedScalar :: BinaryRelation -> Expr -> Expr -> Expr +embedScalar Eq = EqScalar +embedScalar Lt = LtScalar +embedScalar Lte = LteScalar +embedScalar Gt = GtScalar +embedScalar Gte = GteScalar +embedScalar NotEq = NotEqScalar + +embedInstantVectorScalar :: BinaryRelation -> Expr -> Expr -> Expr +embedInstantVectorScalar Eq = EqInstantVectorScalar +embedInstantVectorScalar Lt = LtInstantVectorScalar +embedInstantVectorScalar Lte = LteInstantVectorScalar +embedInstantVectorScalar Gt = GtInstantVectorScalar +embedInstantVectorScalar Gte = GteInstantVectorScalar +embedInstantVectorScalar NotEq = NotEqInstantVectorScalar + +-- k < v <=> v > k +-- k ≤ v <=> v ≥ k +-- k > v <=> v < k +-- k ≥ v <=> v ≤ k +-- k = v <=> v = k +-- k ≠ v <=> v ≠ k +swapInstantVectorScalar :: BinaryRelation -> BinaryRelation +swapInstantVectorScalar Eq = Eq +swapInstantVectorScalar NotEq = NotEq +swapInstantVectorScalar Lt = Gt +swapInstantVectorScalar Lte = Gte +swapInstantVectorScalar Gt = Lt +swapInstantVectorScalar Gte = Lte + +mbBinaryRelationInstantVector :: Expr -> Maybe (Expr, BinaryRelation, Expr) +mbBinaryRelationInstantVector (EqInstantVectorScalar a b) = Just (a, Eq, b) +mbBinaryRelationInstantVector (LtInstantVectorScalar a b) = Just (a, Lt, b) +mbBinaryRelationInstantVector (LteInstantVectorScalar a b) = Just (a, Lte, b) +mbBinaryRelationInstantVector (GtInstantVectorScalar a b) = Just (a, Gt, b) +mbBinaryRelationInstantVector (GteInstantVectorScalar a b) = Just (a, Gte, b) +mbBinaryRelationInstantVector (NotEqInstantVectorScalar a b) = Just (a, NotEq, b) +mbBinaryRelationInstantVector _ = Nothing + +mbBinaryRelationScalar :: Expr -> Maybe (Expr, BinaryRelation, Expr) +mbBinaryRelationScalar (EqScalar a b) = Just (a, Eq, b) +mbBinaryRelationScalar (LtScalar a b) 
= Just (a, Lt, b) +mbBinaryRelationScalar (LteScalar a b) = Just (a, Lte, b) +mbBinaryRelationScalar (GtScalar a b) = Just (a, Gt, b) +mbBinaryRelationScalar (GteScalar a b) = Just (a, Gte, b) +mbBinaryRelationScalar (NotEqScalar a b) = Just (a, NotEq, b) +mbBinaryRelationScalar _ = Nothing + +materializeScalar :: BinaryRelation -> Double -> Double -> Bool +materializeScalar Eq = (==) +materializeScalar Lt = (<) +materializeScalar Lte = (<=) +materializeScalar Gt = (>) +materializeScalar Gte = (>=) +materializeScalar NotEq = (/=) diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Query/Expr.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Query/Expr.hs new file mode 100644 index 00000000000..c587654b311 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Query/Expr.hs @@ -0,0 +1,99 @@ +module Cardano.Timeseries.Query.Expr(HoleIdentifier, LabelConstraint(..), Expr(..)) where +import Cardano.Timeseries.Domain.Identifier (Identifier) +import Cardano.Timeseries.Domain.Types (Label, Labelled) + +import Data.Set (Set) +import Data.Text (Text) +import Data.Word (Word64) + +type HoleIdentifier = Int + +data LabelConstraint = LabelConstraintEq (Labelled Text) | LabelConstraintNotEq (Labelled Text) + deriving (Show, Eq, Ord) + +-- | This expression has the following property, assumed in the interpreter: +-- | every expression can be given at most one type and can have at most one interpretation. 
+-- | The property essentially means that expressions like (a + b) can't be part of the language unless +-- | `+` has only one possible meaning, which is not that case (It can be addition of scalars and addition of instant vectors) +data Expr = Number Double + | Variable Identifier + | Str String + | Application Expr Expr + | Lambda Identifier Expr + | Let Identifier Expr Expr + + | AddInstantVectorScalar Expr Expr + | SubInstantVectorScalar Expr Expr + | MulInstantVectorScalar Expr Expr + | DivInstantVectorScalar Expr Expr + | EqInstantVectorScalar Expr Expr + | LtInstantVectorScalar Expr Expr + | LteInstantVectorScalar Expr Expr + | GtInstantVectorScalar Expr Expr + | GteInstantVectorScalar Expr Expr + | NotEqInstantVectorScalar Expr Expr + + | True + | False + | And Expr Expr + | Or Expr Expr + | Not Expr + | EqBool Expr Expr + | NotEqBool Expr Expr + + | Milliseconds Word64 + | Seconds Word64 + | Minutes Word64 + | Hours Word64 + | DurationToScalar Expr + | AddDuration Expr Expr + | Now + | Epoch + | Rewind Expr Expr + | FastForward Expr Expr + | TimestampToScalar Expr + + | AddScalar Expr Expr + | SubScalar Expr Expr + | MulScalar Expr Expr + | DivScalar Expr Expr + | EqScalar Expr Expr + | LtScalar Expr Expr + | LteScalar Expr Expr + | GtScalar Expr Expr + | GteScalar Expr Expr + | NotEqScalar Expr Expr + | BoolToScalar Expr + | InstantVectorToScalar Expr + | Abs Expr + | RoundScalar Expr + + | MkPair Expr Expr + | Fst Expr + | Snd Expr + + | AvgOverTime Expr + | SumOverTime Expr + | Avg Expr + | QuantileBy (Set Label) Expr Expr + | Quantile Expr Expr + | Max Expr + | Min Expr + | Rate Expr + | Increase Expr + | QuantileOverTime Expr Expr + + + | Filter Expr Expr + | Map Expr Expr + | Join Expr Expr + + | Range Expr Expr Expr (Maybe Expr) + + | Unless Expr Expr + + | FilterByLabel (Set LabelConstraint) Expr + + | Metrics + + | Hole HoleIdentifier deriving Show diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Resolve.hs 
b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Resolve.hs new file mode 100644 index 00000000000..8b58649242f --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Resolve.hs @@ -0,0 +1,115 @@ +module Cardano.Timeseries.Resolve(resolveTy, resolveBinding, resolveContext, resolveExpr') where +import Cardano.Timeseries.Query.Expr (Expr (..)) +import qualified Cardano.Timeseries.Query.Expr as Expr +import Cardano.Timeseries.Typing +import qualified Cardano.Timeseries.Typing as Ty + +import qualified Data.Map.Strict as Map + + +-- | Computes the head-normal form of `Ty` w.r.t. hole resolution (i.e. unfolds holes recursively up to the head expression). +resolveTy :: Defs -> Ty -> Ty +resolveTy defs (Ty.Hole x) = + case Map.lookup x defs of + Just (TyHoleInst rhs) -> resolveTy defs rhs + Just _ -> Ty.Hole x + Nothing -> error $ "[INTERNAL ERROR] Can't find hole in Σ: " <> show x +resolveTy defs (InstantVector typ) = InstantVector (resolveTy defs typ) +resolveTy defs (RangeVector typ) = RangeVector (resolveTy defs typ) +resolveTy defs (Fun typ typ') = Fun (resolveTy defs typ) (resolveTy defs typ') +resolveTy defs (Pair typ typ') = Pair (resolveTy defs typ) (resolveTy defs typ') +resolveTy _ Scalar = Scalar +resolveTy _ Timestamp = Timestamp +resolveTy _ Duration = Duration +resolveTy _ Bool = Bool +resolveTy _ Text = Text + +-- | Computes the head-normal form of `Binding` w.r.t. hole resolution +-- (i.e. unfolds holes recursively up to the head expression in type of the binding). +resolveBinding :: Defs -> Binding -> Binding +resolveBinding defs (LetBinding x rhs typ) = + LetBinding x rhs (resolveTy defs typ) +resolveBinding defs (LambdaBinding x typ) = + LambdaBinding x (resolveTy defs typ) + +-- | Computes the head-normal form of `Context` w.r.t. hole resolution +-- (i.e. unfolds holes recursively up to the head expression in every type of the context). 
+resolveContext :: Defs -> Context -> Context +resolveContext defs = fmap (resolveBinding defs) + + +-- | Computes the normal form of `Expr` w.r.t. hole resolution (i.e. resolves *all* holes in the expression). +resolveExpr' :: Defs -> Expr -> Expr +resolveExpr' _ Metrics = Metrics +resolveExpr' _ (Number f) = Number f +resolveExpr' _ (Str s) = Str s +resolveExpr' defs (AddInstantVectorScalar a b) = AddInstantVectorScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (SubInstantVectorScalar a b) = SubInstantVectorScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (MulInstantVectorScalar a b) = MulInstantVectorScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (DivInstantVectorScalar a b) = DivInstantVectorScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (AddScalar a b) = AddScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (SubScalar a b) = SubScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (MulScalar a b) = MulScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (DivScalar a b) = DivScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (EqScalar a b) = EqScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (NotEqScalar a b) = NotEqScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (LtScalar a b) = LtScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (LteScalar a b) = LteScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (GtScalar a b) = GtScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (GteScalar a b) = GteScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (EqInstantVectorScalar a b) = EqInstantVectorScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (NotEqInstantVectorScalar a b) = NotEqInstantVectorScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs 
(LtInstantVectorScalar a b) = LtInstantVectorScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (LteInstantVectorScalar a b) = LteInstantVectorScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (GtInstantVectorScalar a b) = GtInstantVectorScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (GteInstantVectorScalar a b) = GteInstantVectorScalar (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' _ Expr.True = Expr.True +resolveExpr' _ Expr.False = Expr.False +resolveExpr' defs (Not t) = Not (resolveExpr' defs t) +resolveExpr' defs (And a b) = And (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (Or a b) = Or (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (EqBool a b) = EqBool (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (NotEqBool a b) = NotEqBool (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (Application f e) = Application (resolveExpr' defs f) (resolveExpr' defs e) +resolveExpr' defs (Lambda x f) = Lambda x (resolveExpr' defs f) +resolveExpr' defs (Let x rhs e) = Let x (resolveExpr' defs rhs) (resolveExpr' defs e) +resolveExpr' defs (MkPair a b) = MkPair (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' _ (Variable x) = Variable x +resolveExpr' _ (Milliseconds l) = Milliseconds l +resolveExpr' _ (Seconds l) = Seconds l +resolveExpr' _ (Minutes l) = Minutes l +resolveExpr' _ (Hours l) = Hours l +resolveExpr' defs (AddDuration a b) = AddDuration (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (Rewind a b) = Rewind (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (FastForward a b) = FastForward (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' _ Epoch = Epoch +resolveExpr' _ Now = Now +resolveExpr' defs (DurationToScalar t) = DurationToScalar (resolveExpr' defs t) +resolveExpr' defs (TimestampToScalar t) = TimestampToScalar (resolveExpr' defs t) +resolveExpr' defs (BoolToScalar t) = BoolToScalar 
(resolveExpr' defs t) +resolveExpr' defs (InstantVectorToScalar t) = InstantVectorToScalar (resolveExpr' defs t) +resolveExpr' defs (Abs t) = Abs (resolveExpr' defs t) +resolveExpr' defs (RoundScalar t) = RoundScalar (resolveExpr' defs t) +resolveExpr' defs (Fst t) = Fst (resolveExpr' defs t) +resolveExpr' defs (Snd t) = Snd (resolveExpr' defs t) +resolveExpr' defs (AvgOverTime t) = AvgOverTime (resolveExpr' defs t) +resolveExpr' defs (SumOverTime t) = SumOverTime (resolveExpr' defs t) +resolveExpr' defs (Avg t) = Avg (resolveExpr' defs t) +resolveExpr' defs (Min t) = Min (resolveExpr' defs t) +resolveExpr' defs (Max t) = Max (resolveExpr' defs t) +resolveExpr' defs (Rate t) = Rate (resolveExpr' defs t) +resolveExpr' defs (Increase t) = Increase (resolveExpr' defs t) +resolveExpr' defs (QuantileOverTime a b) = QuantileOverTime (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (Filter a b) = Filter (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (FilterByLabel a b) = FilterByLabel a (resolveExpr' defs b) +resolveExpr' defs (Join a b) = Join (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (Unless a b) = Unless (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (Map a b) = Map (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (Quantile a b) = Quantile (resolveExpr' defs a) (resolveExpr' defs b) +resolveExpr' defs (Range a b c d) = + Range (resolveExpr' defs a) (resolveExpr' defs b) (resolveExpr' defs c) (resolveExpr' defs <$> d) +resolveExpr' defs (QuantileBy a b c) = QuantileBy a (resolveExpr' defs b) (resolveExpr' defs c) +resolveExpr' defs (Expr.Hole idx) = + case Map.lookup idx defs of + Just (ExprHoleInst rhs _) -> resolveExpr' defs rhs + _ -> Expr.Hole idx diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store.hs new file mode 100644 index 00000000000..fe1febc7cab --- /dev/null +++ 
b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store.hs @@ -0,0 +1,41 @@ +{-# LANGUAGE FunctionalDependencies #-} + +module Cardano.Timeseries.Store(Store(..), stalenessConstant) where + +import Cardano.Timeseries.Domain.Instant +import Cardano.Timeseries.Domain.Types + +import Data.Set (Set) +import Data.Word (Word64) + +-- | Milliseconds +stalenessConstant :: Word64 +stalenessConstant = 5 * 60 * 1000 + +-- | A type-class witnessing that `s` is a metric-store of `a`. +class Store s a | s -> a where + -- | Insert an instant into the store under a metric name. + insert :: s -> MetricIdentifier -> Instant a -> s + + -- | Delete all entries older than the specified `Timestamp` from the store + truncate :: s -> Timestamp -> s + + -- | Compute a point vector of type `a` such that the timestamp of every point in the vector + -- | lies within the staleness window of the target timestamp and is the most recent of all + -- | points in the store sharing a series. + evaluate :: s -> MetricIdentifier -> Timestamp -> InstantVector a + + -- | Find the earliest occurrence of the metric in the store, if any. + earliest :: s -> MetricIdentifier -> Maybe Timestamp + + -- | Find the latest occurrence of the metric in the store, if any. + latest :: s -> MetricIdentifier -> Maybe Timestamp + + -- | An empty store. + new :: s + + -- A set of metric identifies in the store. + metrics :: s -> Set MetricIdentifier + + -- | Total number of (, , , ) tuples. 
+ count :: s -> Int diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store/Flat.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store/Flat.hs new file mode 100644 index 00000000000..7cc9abd7ce1 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store/Flat.hs @@ -0,0 +1,74 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE InstanceSigs #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE RecordWildCards #-} +module Cardano.Timeseries.Store.Flat(Flat, Point(..)) where + +import Cardano.Timeseries.Domain.Instant (Instant (..), InstantVector, share) +import qualified Cardano.Timeseries.Domain.Instant as Instant +import Cardano.Timeseries.Domain.Types +import Cardano.Timeseries.Store +import Cardano.Timeseries.Util (toMaybe) + +import Prelude hiding (Foldable (..)) + +import Control.DeepSeq (NFData) +import Data.Foldable (Foldable (..)) +import Data.Set (fromList) +import GHC.Generics (Generic) + +data Point a = Point { + name :: MetricIdentifier, + instant :: Instant a +} deriving (Show, Eq, Functor, Generic) + +instance NFData a => NFData (Point a) + +type Flat a = [Point a] + +instance Store (Flat a) a where + insert :: Flat a -> MetricIdentifier -> Instant a -> Flat a + insert store metric instant = Point metric instant : store + + truncate store cutoff = filter f store where + f Point{instant} = instant.timestamp >= cutoff + + evaluate :: Flat a -> MetricIdentifier -> Timestamp -> InstantVector a + evaluate store targetMetric targetTime = updateTime $ foldl' choose [] store where + + updateTime :: InstantVector a -> InstantVector a + updateTime = fmap (\i -> Instant i.labels targetTime i.value) + + choose :: InstantVector a -> Point a -> InstantVector a + choose acc point = accumulate acc (toMaybe (satisfies point) point) where + + -- | Does that point match target metric name? 
+ -- | Does that point lie within the staleness window? + satisfies :: Point a -> Bool + satisfies x = x.name == targetMetric + && x.instant.timestamp + stalenessConstant >= targetTime + && x.instant.timestamp <= targetTime + + accumulate :: InstantVector a -> Maybe (Point a) -> InstantVector a + accumulate acc' Nothing = acc' + accumulate acc' (Just p_) = accumulateInt acc' p_ where + accumulateInt :: InstantVector a -> Point a -> InstantVector a + accumulateInt [] p = [p.instant] + accumulateInt (x : xs) p | share x p.instant = Instant.mostRecent x p.instant : xs + accumulateInt (x : xs) p = x : accumulateInt xs p + + new = [] + + metrics store = fromList (map (.name) store) + + count = length + + earliest [] _ = Nothing + earliest store ident = Just $ minimum [timestamp instant | Point{..} <- store, name == ident] + + latest [] _ = Nothing + latest store ident = Just $ maximum [timestamp instant | Point{..} <- store, name == ident] diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store/Flat/Parser.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store/Flat/Parser.hs new file mode 100644 index 00000000000..24239bb9fba --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store/Flat/Parser.hs @@ -0,0 +1,62 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Cardano.Timeseries.Store.Flat.Parser(double, point) where + +import Cardano.Timeseries.Domain.Instant +import Cardano.Timeseries.Domain.Types (Labelled, MetricIdentifier, Timestamp) +import Cardano.Timeseries.Store.Flat (Point (..)) + +import Data.Char (isControl) +import Data.Scientific (toRealFloat) +import Data.Set (fromList) +import Data.Text (Text) +import qualified Data.Text as Text +import Data.Void (Void) +import Data.Word (Word64) +import Text.Megaparsec +import Text.Megaparsec.Char (char, space, space1, string) +import Text.Megaparsec.Char.Lexer (decimal, scientific, signed) + +type Parser = Parsec Void Text + +double :: Parser Double +double = toRealFloat <$> 
signed (pure ()) scientific + +-- | s ::= +-- s [s = s, ..., s = s] +point :: Show a => Parser a -> Parser (Point a) +point value = makePoint + <$> text + <* space + <*> inBrackets labels + <* space + <*> timestamp + <* space1 + <*> value + + where + + makePoint :: MetricIdentifier -> [Labelled Text] -> Timestamp -> a -> Point a + makePoint n ls t v = Point n (Instant (fromList ls) t v) + + comma :: Parser () + comma = space <* string "," <* space + + equals :: Parser () + equals = space <* string "=" <* space + + inBrackets :: Parser a -> Parser a + inBrackets f = string "[" *> space *> f <* space <* string "]" + + text :: Parser Text + text = Text.pack <$> (char '\"' *> many one) <* char '\"' where + one :: Parser Char + one = satisfy (\x -> not (isControl x) && (x /= '"') && (x /= '\n') && (x /= '\r')) + + labels :: Parser [(Text, Text)] + labels = sepBy + ((,) <$> (space *> text) <* equals <*> text) + comma + + timestamp :: Parser Word64 + timestamp = decimal diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store/Tree.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store/Tree.hs new file mode 100644 index 00000000000..17a3e96c264 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Store/Tree.hs @@ -0,0 +1,90 @@ +{-# LANGUAGE DeriveFoldable #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE InstanceSigs #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedRecordDot #-} +module Cardano.Timeseries.Store.Tree(Point(..), Tree, fromFlat) where + +import Cardano.Timeseries.Domain.Instant (Instant (..), InstantVector) +import Cardano.Timeseries.Domain.Types (MetricIdentifier, SeriesIdentifier, Timestamp) +import Cardano.Timeseries.Store +import Cardano.Timeseries.Store.Flat (Flat) +import qualified Cardano.Timeseries.Store.Flat as Flat +import Cardano.Timeseries.Util (range) + +import Prelude hiding (lookup) 
+ +import Control.DeepSeq (NFData) +import Data.Functor +import qualified Data.List as List +import Data.Map.Strict (Map) +import qualified Data.Map.Strict as Map +import Data.Maybe (fromMaybe) +import Data.Set (Set) +import GHC.Generics (Generic) + +data Point a = Point { + labels :: SeriesIdentifier, + value :: a +} deriving (Show, Ord, Eq, Foldable, Functor, Generic) + +instance NFData a => NFData (Point a) + +type Tree a = Map MetricIdentifier (Map Timestamp [Point a]) + +instance Store (Tree a) a where + insert :: Tree a -> MetricIdentifier -> Instant a -> Tree a + insert store x (Instant ls t v) = flip (Map.insert x) store $ uncurry (Map.insert t) $ + case Map.lookup x store of + Nothing -> ([Point ls v], Map.empty) + Just inner -> (Point ls v : fromMaybe [] (Map.lookup t inner), inner) + + truncate store cutoff = fmap (Map.dropWhileAntitone (< cutoff)) store + + evaluate :: Tree a -> MetricIdentifier -> Timestamp -> InstantVector a + evaluate store x t = case Map.lookup x store of + Just inner -> + updateTime $ convert $ Map.foldlWithKey accumulate Map.empty $ + range + rangeStartExc + rangeEndExc + inner + + where + -- | (-) wraps around, so make sure we do not in case `t` is too small + rangeStartExc = max 0 (t - stalenessConstant) + + rangeEndExc = t + 1 + + updateTime :: InstantVector a -> InstantVector a + updateTime = fmap (\i -> Instant i.labels t i.value) + + accumulate :: Map SeriesIdentifier (Timestamp, a) -> Timestamp -> [Point a] -> Map SeriesIdentifier (Timestamp, a) + accumulate closest tAcc = List.foldl' accumulateInt closest where + accumulateInt closest' (Point ls v) = flip (Map.insert ls) closest' $ + case Map.lookup ls closest' of + Just (t', v') | t' > tAcc -> (t', v') + _ -> (tAcc, v) + + convert :: Map SeriesIdentifier (Timestamp, a) -> InstantVector a + convert = map (\(ls, (tConv, v)) -> Instant ls tConv v) . 
Map.toList + Nothing -> [] + + new :: Tree a + new = Map.empty + + metrics :: Tree a -> Set MetricIdentifier + metrics = Map.keysSet + + count = sum . (sum . Map.map length <$>) + + earliest store key = Map.lookup key store >>= Map.lookupMin <&> fst + + latest store key = Map.lookup key store >>= Map.lookupMax <&> fst + +fromFlat :: Flat a -> Tree a +fromFlat [] = new +fromFlat (Flat.Point x instant : ps) = insert (fromFlat ps) x instant diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Surface/Expr.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Surface/Expr.hs new file mode 100644 index 00000000000..222923141f2 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Surface/Expr.hs @@ -0,0 +1,233 @@ +{-# LANGUAGE FlexibleInstances #-} +{-# OPTIONS_GHC -Wno-orphans #-} +module Cardano.Timeseries.Surface.Expr(Expr(..), mkRange, mkApp, Loc, getLoc) where + +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Domain.Identifier (Identifier) +import Cardano.Timeseries.Domain.Types (Label) +import Cardano.Timeseries.Query.Expr (LabelConstraint) + +import Prelude hiding (Foldable (..)) + +import Data.Foldable (Foldable (..)) +import Data.Set (Set) +import Data.Text (Text, pack) +import Data.Word (Word64) +import Text.Megaparsec (SourcePos, sourcePosPretty) + +-- ---- not ---- +-- not < atom +-- ---- range ---- +-- range < atom +-- ---- app ---- +-- app < atom +-- app < not +-- app < range +-- ---- add ---- +-- add < atom +-- add < not +-- add < range +-- add < app +-- ---- mul ---- +-- mul < atom +-- mul < not +-- mul < range +-- mul < app +-- mul < add +-- ---- comp ---- +-- comp < atom +-- comp < not +-- comp < range +-- comp < app +-- comp < add +-- comp < mul +-- ---- and ---- +-- and < atom +-- and < not +-- and < range +-- and < app +-- and < add +-- and < mul +-- and < comp +-- ---- or ---- +-- or < atom +-- or < not +-- or < range +-- or < app +-- or < add +-- or < mul +-- or < comp +-- or < and +-- ---- universe ---- 
+-- universe < atom +-- universe < not +-- universe < range +-- universe < app +-- universe < add +-- universe < mul +-- universe < comp +-- universe < and +-- universe < or + +-- s ::= +-- lc ::= s = s | s != s // label constraint +-- l̅c̅ ::= {lc, ..., .lc} +-- t{atom} ::= (t{≥ universe}, t{≥ universe}) +-- | x +-- | epoch +-- | now +-- | true +-- | false +-- | ms +-- | s +-- | m +-- | h +-- | (t{≥ universe}) +-- | +-- | "" +-- t{not} ::= !t{> not} +-- t{range} ::= t{> range} l̅c̅ +-- | t{> range} [̅t̅{̅≥̅ ̅u̅n̅i̅v̅e̅r̅s̅e̅}̅;̅ ̅t̅{̅≥̅ ̅u̅n̅i̅v̅e̅r̅s̅e̅}̅]̅ ̅|̅ ̅t̅[̅t̅{̅≥̅ ̅u̅n̅i̅v̅e̅r̅s̅e̅}̅;̅ ̅t̅{̅≥̅ ̅u̅n̅i̅v̅e̅r̅s̅e̅}̅ ̅:̅ ̅t̅{̅≥̅ ̅u̅n̅i̅v̅e̅r̅s̅e̅}̅]̅ +-- t{app} ::= fst t{> app} | snd t{> app} +-- | min t{> app} | max t{> app} | avg t{> app} | filter t{> app} t{> app} +-- | join t{> app} t{> app} +-- | map t{> app} t{> app} +-- | round t{> app} +-- | abs t{> app} +-- | increase t{> app} +-- | rate t{> app} +-- | avg_over_time t{> app} +-- | sum_over_time t{> app} +-- | quantile_over_time t{> app} t{> app} +-- | unless t{> app} t{> app} +-- | quantile_by (s, ..., s) t{> app} t{> app} +-- | earliest x +-- | latest x +-- | to_scalar t{> app} +-- | t{> app} t̅{̅>̅ ̅a̅p̅p̅}̅ +-- t{mul} ::= t{> mul} *̅|̅/̅ ̅t̅{̅>̅ ̅m̅u̅l̅}̅ +-- t{add} ::= t{> add} +̅|̅-̅ ̅t̅{̅>̅ ̅a̅d̅d̅}̅ +-- t{comp} ::= t{> comp} == t{> comp} | t{> comp} != t{> comp} | t{> comp} < t{> comp} | t{> comp} <= t{> comp} +-- | t{> comp} > t{> comp} | t{> comp} >= t{> comp} +-- t{and} ::= t{> and} &̅&̅ ̅t̅{̅>̅ ̅a̅n̅d̅}̅ +-- t{or} ::= t{> or} |̅|̅ ̅t̅{̅>̅ ̅o̅r̅}̅ +-- t{universe} ::= let x = t{> universe} in t{≥ universe} | \x -> t{≥ universe} + +-- | Source location. +type Loc = SourcePos + +instance AsText Loc where + asText = pack . 
sourcePosPretty + +data Expr = + Let Loc Identifier Expr Expr + | Lambda Loc Identifier Expr + | Fst Loc Expr + | Snd Loc Expr + | MkPair Loc Expr Expr + | Eq Loc Expr Expr + | NotEq Loc Expr Expr + | Lt Loc Expr Expr + | Lte Loc Expr Expr + | Gt Loc Expr Expr + | Gte Loc Expr Expr + | Add Loc Expr Expr + | Sub Loc Expr Expr + | Mul Loc Expr Expr + | Div Loc Expr Expr + | Not Loc Expr + | Or Loc Expr Expr + | And Loc Expr Expr + | Milliseconds Loc Word64 + | Seconds Loc Word64 + | Minutes Loc Word64 + | Hours Loc Word64 + | Epoch Loc + | Now Loc + | Range Loc Expr Expr Expr (Maybe Expr) + | FilterByLabel Loc (Set LabelConstraint) Expr + | Max Loc Expr + | Min Loc Expr + | Avg Loc Expr + | Filter Loc Expr Expr + | Join Loc Expr Expr + | Map Loc Expr Expr + | Round Loc Expr + | Abs Loc Expr + | Increase Loc Expr + | Rate Loc Expr + | AvgOverTime Loc Expr + | SumOverTime Loc Expr + | QuantileOverTime Loc Expr Expr + | Unless Loc Expr Expr + | QuantileBy Loc (Set Label) Expr Expr + | Earliest Loc Identifier + | Latest Loc Identifier + | Metrics Loc + | ToScalar Loc Expr + | Variable Loc Identifier + | Str Loc Text + | Number Loc Double + | Truth Loc + | Falsity Loc + | App Loc Expr Expr deriving (Show) + +getLoc :: Expr -> Loc +getLoc (Let l _ _ _) = l +getLoc (Lambda l _ _) = l +getLoc (Fst l _) = l +getLoc (Snd l _) = l +getLoc (MkPair l _ _) = l +getLoc (Eq l _ _) = l +getLoc (NotEq l _ _) = l +getLoc (Lt l _ _) = l +getLoc (Lte l _ _) = l +getLoc (Gt l _ _) = l +getLoc (Gte l _ _) = l +getLoc (Add l _ _) = l +getLoc (Sub l _ _) = l +getLoc (Mul l _ _) = l +getLoc (Div l _ _) = l +getLoc (Not l _) = l +getLoc (Or l _ _) = l +getLoc (And l _ _) = l +getLoc (Milliseconds l _) = l +getLoc (Seconds l _) = l +getLoc (Minutes l _) = l +getLoc (Hours l _) = l +getLoc (Epoch l) = l +getLoc (Now l) = l +getLoc (Range l _ _ _ _) = l +getLoc (FilterByLabel l _ _) = l +getLoc (Max l _) = l +getLoc (Min l _) = l +getLoc (Avg l _) = l +getLoc (Filter l _ _) = l +getLoc (Join l _ 
_) = l +getLoc (Map l _ _) = l +getLoc (Abs l _) = l +getLoc (Round l _) = l +getLoc (Increase l _) = l +getLoc (Rate l _) = l +getLoc (AvgOverTime l _) = l +getLoc (SumOverTime l _) = l +getLoc (QuantileOverTime l _ _) = l +getLoc (Unless l _ _) = l +getLoc (QuantileBy l _ _ _) = l +getLoc (Earliest l _) = l +getLoc (Latest l _) = l +getLoc (Metrics l) = l +getLoc (ToScalar l _) = l +getLoc (Variable l _) = l +getLoc (Str l _) = l +getLoc (Number l _) = l +getLoc (Truth l) = l +getLoc (Falsity l) = l +getLoc (App l _ _) = l + +mkRange :: Loc -> Expr -> Either (Expr, Expr, Maybe Expr) (Set LabelConstraint) -> Expr +mkRange loc t (Left (a, b, c)) = Range loc t a b c +mkRange loc t (Right set) = FilterByLabel loc set t + +mkApp :: Loc -> Expr -> [Expr] -> Expr +mkApp l = foldl' (App l) diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Surface/Expr/Head.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Surface/Expr/Head.hs new file mode 100644 index 00000000000..2896468521f --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Surface/Expr/Head.hs @@ -0,0 +1,27 @@ +module Cardano.Timeseries.Surface.Expr.Head(Head(..)) where +import Cardano.Timeseries.Domain.Identifier (Identifier) +import Cardano.Timeseries.Domain.Types (Label) + +import Data.Set (Set) + +data Head = Fst + | Snd + | Min + | Max + | Avg + | Filter + | Join + | Map + | Round + | Abs + | Increase + | Rate + | AvgOverTime + | SumOverTime + | QuantileOverTime + | Unless + | QuantileBy (Set Label) + | Earliest Identifier + | Latest Identifier + | ToScalar + | Metrics deriving (Show) diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Surface/Expr/Parser.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Surface/Expr/Parser.hs new file mode 100644 index 00000000000..cefdf0b0dd3 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Surface/Expr/Parser.hs @@ -0,0 +1,376 @@ +{- HLINT ignore "Use <$>" -} +module 
Cardano.Timeseries.Surface.Expr.Parser(Parser, expr) where + +import Cardano.Timeseries.Domain.Identifier (Identifier (User)) +import Cardano.Timeseries.Domain.Types (Label) +import Cardano.Timeseries.Query.Expr (LabelConstraint (..)) +import Cardano.Timeseries.Surface.Expr +import Cardano.Timeseries.Surface.Expr.Head (Head) +import qualified Cardano.Timeseries.Surface.Expr.Head as Head + +import Prelude hiding (Foldable (..), head) + +import Control.Applicative hiding (many, some) +import Control.Monad (guard) +import Data.Char (isAlpha, isAlphaNum) +import Data.Foldable (Foldable (..)) +import Data.Functor (void) +import Data.Scientific (toRealFloat) +import Data.Set (Set) +import qualified Data.Set as Set +import Data.Text (Text) +import qualified Data.Text as Text +import Data.Void (Void) +import GHC.Unicode (isControl) +import Text.Megaparsec +import Text.Megaparsec.Char (char, space, space1, string) +import Text.Megaparsec.Char.Lexer (decimal, scientific, signed) + +type Parser = Parsec Void Text + +keywords :: [Text] +keywords = ["let", "in", "true", "false", "epoch", "now", "fst", "snd", + "min", "max", "avg", "filter", "join", "map", "abs", "increase", + "rate", "avg_over_time", "sum_over_time", "quantile_over_time", "unless", + "quantile_by", "earliest", "latest", "to_scalar", "metrics"] + +unescapedVariableIdentifierNextChar :: Parser Char +unescapedVariableIdentifierNextChar = satisfy (\x -> isAlphaNum x || x == '_') + +unescapedVariableIdentifier :: Parser Text +unescapedVariableIdentifier = + Text.pack <$> ((:) <$> firstChar <*> many unescapedVariableIdentifierNextChar) "identifier" where + firstChar :: Parser Char + firstChar = satisfy (\x -> isAlpha x || x == '_') + +number :: Parser Expr +number = + Number <$> getSourcePos <*> (toRealFloat <$> signed (pure ()) scientific "number") + +escapedVariableIdentifier :: Parser Text +escapedVariableIdentifier = Text.pack <$> (char '`' *> manyTill one (char '`')) where + one :: Parser Char + one = satisfy 
(\x -> not (isControl x) && (x /= '`') && (x /= '\n') && (x /= '\r')) + +literalVariableIdentifier :: Parser Text +literalVariableIdentifier = do + x <- unescapedVariableIdentifier + guard (x `notElem` keywords) + pure x + +variableIdentifier :: Parser Identifier +variableIdentifier = User <$> (literalVariableIdentifier <|> escapedVariableIdentifier) + +variable :: Parser Expr +variable = Variable <$> getSourcePos <*> variableIdentifier + +text :: Parser Text +text = Text.pack <$> (char '\"' *> many one) <* char '\"' where + one :: Parser Char + one = satisfy (\x -> not (isControl x) && (x /= '"') && (x /= '\n') && (x /= '\r')) + +str :: Parser Expr +str = Str <$> getSourcePos <*> text + +milliseconds :: Parser Expr +milliseconds = Milliseconds <$> getSourcePos <*> decimal <* string "ms" <* notFollowedBy (satisfy isAlpha) + +seconds :: Parser Expr +seconds = Seconds <$> getSourcePos <*> decimal <* string "s" <* notFollowedBy (satisfy isAlpha) + +minutes :: Parser Expr +minutes = Minutes <$> getSourcePos <*> decimal <* string "m" <* notFollowedBy (satisfy isAlpha) + +hours :: Parser Expr +hours = Hours <$> getSourcePos <*> decimal <* string "h" <* notFollowedBy (satisfy isAlpha) + +now :: Parser Expr +now = Now <$> getSourcePos <* string "now" + +epoch :: Parser Expr +epoch = Now <$> getSourcePos <* string "epoch" + +true :: Parser Expr +true = Truth <$> getSourcePos <* string "true" + +false :: Parser Expr +false = Falsity <$> getSourcePos <* string "false" + +continueTight :: Expr -> Parser Expr +continueTight a = a <$ string ")" + +continuePair :: Loc -> Expr -> Parser Expr +continuePair l a = do + void $ string "," + space + b <- exprUniverse + space + void $ string ")" + pure (MkPair l a b) + +tightOrPair :: Parser Expr +tightOrPair = do + l <- getSourcePos + void $ string "(" + space + a <- exprUniverse + space + try (continuePair l a) <|> continueTight a + +exprAtom :: Parser Expr +exprAtom = tightOrPair + <|> epoch + <|> true + <|> false + <|> now + <|> 
variable + <|> try milliseconds + <|> try seconds + <|> try minutes + <|> try hours + <|> number + <|> str + +exprNot :: Parser Expr +exprNot = Not <$> getSourcePos <*> (string "!" *> space *> exprAtom) + +range :: Parser (Expr, Expr, Maybe Expr) +range = do + void $ string "[" + space + a <- exprUniverse + space + void $ string ";" + space + b <- exprUniverse + c <- optional $ do + space + void $ string ":" + space + c <- exprUniverse + space + pure c + void $ string "]" + pure (a, b, c) + +exprRange :: Parser Expr +exprRange = do + l <- getSourcePos + head <- exprAtom + ext <- many (try $ space *> (try (Left <$> range) <|> Right <$> labelConstraints)) + pure $ foldl' (mkRange l) head ext + +labelConstraint :: Parser LabelConstraint +labelConstraint = do + x <- text + space + c <- Left () <$ string "=" <|> Right () <$ string "!=" + space + v <- text + pure (mk x c v) where + mk x (Left _) v = LabelConstraintEq (x, v) + mk x (Right _) v = LabelConstraintNotEq (x, v) + +labelConstraints :: Parser (Set LabelConstraint) +labelConstraints = do + void $ string "{" + space + list <- sepBy labelConstraint (space *> string "," <* space) + space + void $ string "}" + pure $ Set.fromList list + +setLabel :: Parser (Set Label) +setLabel = do + void $ string "(" + space + list <- sepBy text (space *> string "," <* space) + space + void $ string ")" + pure $ Set.fromList list + +headParse :: Parser Head +headParse = ( + Head.Fst <$ string "fst" + <|> Head.Snd <$ string "snd" + <|> Head.Min <$ string "min" + <|> Head.Max <$ string "max" + <|> Head.Filter <$ string "filter" + <|> Head.Join <$ string "join" + <|> Head.Map <$ string "map" + <|> Head.Abs <$ string "abs" + <|> Head.Round <$ string "round" + <|> Head.Increase <$ string "increase" + <|> Head.Rate <$ string "rate" + <|> Head.AvgOverTime <$ string "avg_over_time" + <|> Head.Avg <$ string "avg" + <|> Head.SumOverTime <$ string "sum_over_time" + <|> Head.QuantileOverTime <$ string "quantile_over_time" + <|> Head.Unless <$ 
string "unless" + <|> Head.QuantileBy <$> (string "quantile_by" *> space1 *> setLabel) + <|> Head.Earliest <$> (string "earliest" *> space1 *> variableIdentifier) + <|> Head.Latest <$> (string "latest" *> space1 *> variableIdentifier) + <|> Head.ToScalar <$ string "to_scalar" + <|> Head.Metrics <$ string "metrics" + ) <* notFollowedBy unescapedVariableIdentifierNextChar + +wrongNumberOfArguments :: Int -> String -> Parser a +wrongNumberOfArguments n head = fail $ "Wrong number of arguments (" <> show n <> ") for `" <> head <> "`" + +applyBuiltin :: Loc -> Head -> [Expr] -> Parser Expr +applyBuiltin l Head.Fst [t] = pure $ Fst l t +applyBuiltin l Head.Snd [t] = pure $ Snd l t +applyBuiltin l Head.Min [t] = pure $ Min l t +applyBuiltin l Head.Max [t] = pure $ Max l t +applyBuiltin l Head.Avg [t] = pure $ Avg l t +applyBuiltin l Head.Filter [f, xs] = pure $ Filter l f xs +applyBuiltin l Head.Join [xs, ys] = pure $ Join l xs ys +applyBuiltin l Head.Map [f, xs] = pure $ Map l f xs +applyBuiltin l Head.Abs [t] = pure $ Abs l t +applyBuiltin l Head.Round [t] = pure $ Round l t +applyBuiltin l Head.Increase [xs] = pure $ Increase l xs +applyBuiltin l Head.Rate [xs] = pure $ Rate l xs +applyBuiltin l Head.AvgOverTime [xs] = pure $ AvgOverTime l xs +applyBuiltin l Head.SumOverTime [xs] = pure $ SumOverTime l xs +applyBuiltin l Head.QuantileOverTime [k, xs] = pure $ QuantileOverTime l k xs +applyBuiltin l Head.Unless [xs, ys] = pure $ Unless l xs ys +applyBuiltin l (Head.QuantileBy ls) [k, xs] = pure $ QuantileBy l ls k xs +applyBuiltin l (Head.Earliest x) [] = pure $ Earliest l x +applyBuiltin l (Head.Latest x) [] = pure $ Latest l x +applyBuiltin l Head.ToScalar [t] = pure $ ToScalar l t +applyBuiltin l Head.Metrics [] = pure $ Metrics l +applyBuiltin _ h args = wrongNumberOfArguments (length args) (show h) + +apply :: Loc -> Either Head Expr -> [Expr] -> Parser Expr +apply l (Left t) = applyBuiltin l t +apply l (Right t) = pure . 
mkApp l t + +exprAppArg :: Parser Expr +exprAppArg = try exprNot <|> exprRange + +exprApp :: Parser Expr +exprApp = do + l <- getSourcePos + h <- try (Left <$> headParse) <|> Right <$> exprAppArg + args <- many (try (space1 *> exprAppArg)) + apply l h args + +data Mul = Asterisk | Slash + +mul :: Parser Mul +mul = Asterisk <$ try (string "*") <|> Slash <$ string "/" + +applyMul :: Loc -> Expr -> (Mul, Expr) -> Expr +applyMul l x (Asterisk, y) = Mul l x y +applyMul l x (Slash, y) = Div l x y + +applyListMul :: Loc -> Expr -> [(Mul, Expr)] -> Expr +applyListMul l = foldl' (applyMul l) + +exprMul :: Parser Expr +exprMul = do + l <- getSourcePos + h <- exprApp + args <- many ((,) <$> try (space *> mul) <*> (space *> exprApp)) + pure $ applyListMul l h args + +data Add = Plus | Minus deriving (Show) + +add :: Parser Add +add = Plus <$ try (string "+") <|> Minus <$ string "-" + +applyAdd :: Loc -> Expr -> (Add, Expr) -> Expr +applyAdd l x (Plus, y) = Add l x y +applyAdd l x (Minus, y) = Sub l x y + +applyListAdd :: Loc -> Expr -> [(Add, Expr)] -> Expr +applyListAdd l = foldl' (applyAdd l) + +exprAdd :: Parser Expr +exprAdd = do + l <- getSourcePos + h <- exprMul + args <- many ((,) <$> try (space *> add) <*> (space *> exprMul)) + pure $ applyListAdd l h args + +data Comp = EqSign | NotEqSign | LtSign | LteSign | GtSign | GteSign + +comp :: Parser Comp +comp = EqSign <$ string "==" + <|> NotEqSign <$ string "!=" + <|> LteSign <$ string "<=" + <|> GteSign <$ string ">=" + <|> LtSign <$ string "<" + <|> GtSign <$ string ">" + +applyComp :: Loc -> Expr -> (Comp, Expr) -> Expr +applyComp l a (EqSign, b) = Eq l a b +applyComp l a (NotEqSign, b) = NotEq l a b +applyComp l a (LtSign, b) = Lt l a b +applyComp l a (LteSign, b) = Lte l a b +applyComp l a (GtSign, b) = Gt l a b +applyComp l a (GteSign, b) = Gte l a b + +applyListComp :: Loc -> Expr -> [(Comp, Expr)] -> Expr +applyListComp l = foldl' (applyComp l) + +exprComp :: Parser Expr +exprComp = do + l <- getSourcePos + h <- 
exprAdd + args <- many ((,) <$> try (space *> comp) <*> (space *> exprAdd)) + pure $ applyListComp l h args + +applyAnd :: Loc -> Expr -> [Expr] -> Expr +applyAnd l = foldl' (And l) + +exprAnd :: Parser Expr +exprAnd = do + l <- getSourcePos + h <- exprComp + args <- many (try (space *> string "&&") *> space *> exprComp) + pure $ applyAnd l h args + +applyOr :: Loc -> Expr -> [Expr] -> Expr +applyOr l = foldl' (Or l) + +exprOr :: Parser Expr +exprOr = do + l <- getSourcePos + h <- exprAnd + args <- many (try (space *> string "||") *> space *> exprAnd) + pure $ applyOr l h args + +exprLet :: Parser Expr +exprLet = do + l <- getSourcePos + void $ string "let" + space1 + x <- variableIdentifier + space + void $ string "=" + space + rhs <- exprOr + space + void $ string "in" + space1 + body <- exprUniverse + pure $ Let l x rhs body + +exprLambda :: Parser Expr +exprLambda = do + l <- getSourcePos + void $ string "\\" + space + x <- variableIdentifier + space + void $ string "->" + space + body <- exprUniverse + pure $ Lambda l x body + +exprUniverse :: Parser Expr +exprUniverse = try exprLet <|> (try exprLambda <|> exprOr) + +expr :: Parser Expr +expr = exprUniverse diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Typing.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Typing.hs new file mode 100644 index 00000000000..96ce23e0907 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Typing.hs @@ -0,0 +1,108 @@ +{-# LANGUAGE FlexibleInstances #-} + +module Cardano.Timeseries.Typing( + Ty(..), + Binding(..), + Context, + identifier, + Def(..), + Defs, + instantiateTy, + instantiateExpr, + prettyTy, + TyPrec(..), + ty + ) where + +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Domain.Identifier (Identifier) +import Cardano.Timeseries.Query.Expr (HoleIdentifier) +import qualified Cardano.Timeseries.Query.Expr as Semantic + +import Data.Foldable as Foldable (toList) +import Data.Map.Strict (Map) +import qualified 
Data.Map.Strict as Map +import Data.Sequence as Seq +import Data.Text (Text) +import qualified Data.Text as Text + +-- | Typing of a query expression. +data Ty = InstantVector Ty + | RangeVector Ty + | Scalar + | Text + | Bool + | Pair Ty Ty + | Timestamp + | Duration + | Fun Ty Ty + | Hole HoleIdentifier deriving (Show, Eq) + +data TyPrec = Loose | FunCodomain | FunDomain | Tight deriving (Show, Eq, Ord) + +conditionalParens :: Bool -> Text -> Text +conditionalParens True t = "(" <> t <> ")" +conditionalParens False t = t + +prettyTy :: TyPrec -> Ty -> Text +prettyTy prec (InstantVector typ) = conditionalParens (prec == Tight) $ + "InstantVector " <> prettyTy Tight typ +prettyTy prec (RangeVector typ) = conditionalParens (prec == Tight) $ + "RangeVector " <> prettyTy Tight typ +prettyTy _prec (Pair typ typ') = + "(" <> prettyTy Loose typ <> ", " <> prettyTy Loose typ' <> ")" +prettyTy prec (Fun typ typ') = conditionalParens (prec > FunCodomain) $ + prettyTy FunDomain typ <> " -> " <> prettyTy FunCodomain typ' +prettyTy _ Scalar = "Scalar" +prettyTy _ Bool = "Bool" +prettyTy _ Timestamp = "Timestamp" +prettyTy _ Duration = "Duration" +prettyTy _ Text = "Text" +prettyTy _ (Hole idx) = "?" <> showT idx + +-- | A context entry of a typing context. +data Binding = LetBinding Identifier Semantic.Expr Ty + | LambdaBinding Identifier Ty deriving (Show) + +instance AsText Binding where + asText (LetBinding x _rhs typ) = "(" <> asText x <> " ≔ " <> "..." <> " : " <> prettyTy Loose typ <> ")" + asText (LambdaBinding x typ) = "(" <> asText x <> " : " <> prettyTy Loose typ <> ")" + +identifier :: Binding -> Identifier +identifier (LetBinding x _ _) = x +identifier (LambdaBinding x _) = x + +ty :: Binding -> Ty +ty (LetBinding _ _ typ) = typ +ty (LambdaBinding _ typ) = typ + +-- | Γ +-- A typing context of a query expression. 
+type Context = Seq Binding + +instance AsText Context where + asText Empty = "()" + asText ctx = Text.unwords $ map asText $ Foldable.toList ctx + +-- | (? type) | (? ≔ T type) | (? : T) | (? ≔ t : T) +-- Definition of a type- or expression- level hole. +data Def = TyHoleDecl | TyHoleInst Ty | ExprHoleDecl Ty | ExprHoleInst Semantic.Expr Ty + +-- | Σ +-- A collection of hole definitions `Def` indexed by `HoleIdentifier`. +type Defs = Map HoleIdentifier Def + +-- | Assumes that the given `Defs` contains a `TyHoleDecl` of the given `HoleIdentifier`. +instantiateTy :: HoleIdentifier -> Ty -> Defs -> Defs +instantiateTy x rhs defs = + case Map.lookup x defs of + Just TyHoleDecl -> Map.insert x (TyHoleInst rhs) defs + _ -> error $ "[INTERNAL ERROR] Incorrect or missing Def for type hole identifier: " <> show x + +-- | Assumes that the given `Defs` contains a `ExprHoleDecl` of the given `HoleIdentifier`. +-- Types of the hole and the provided `Semantic.Expr` must be compatible. +instantiateExpr :: HoleIdentifier -> Semantic.Expr -> Defs -> Defs +instantiateExpr x rhs defs = + case Map.lookup x defs of + Just (ExprHoleDecl typ) -> Map.insert x (ExprHoleInst rhs typ) defs + _ -> error $ "[INTERNAL ERROR] Incorrect or missing Def for expr hole identifier: " <> show x diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Unify.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Unify.hs new file mode 100644 index 00000000000..6c1be5eb1f1 --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Unify.hs @@ -0,0 +1,93 @@ +{- HLINT ignore "Use newtype instead of data" -} +module Cardano.Timeseries.Unify(UnificationProblem(..), St(..), UnifyM, unify, solve) where +import Cardano.Timeseries.Query.Expr (HoleIdentifier) +import Cardano.Timeseries.Resolve +import Cardano.Timeseries.Surface.Expr (Loc) +import Cardano.Timeseries.Typing (Defs, Ty (..), TyPrec (Loose), instantiateTy, prettyTy) + +import Control.Monad.Except (ExceptT, throwError) +import 
Control.Monad.State.Strict (State, get, state) +import Data.Text (Text, pack) +import Text.Megaparsec (sourcePosPretty) + +-- | A = B type +-- An equation between two query types containing holes. +-- Unification is an algorithm of finding unique solutions to such equations. +data UnificationProblem = UnificationProblem { + -- | Source location that induces the unification problem. + loc :: Loc, + lhs :: Ty, + rhs :: Ty +} + +data St = St { + defs :: Defs +} + +type UnifyError = Text + +type UnifyM a = ExceptT UnifyError (State St) a + +updateDefs :: Defs -> St -> St +updateDefs defs (St _) = St defs + +unifyHole :: HoleIdentifier -> Ty -> UnifyM () +unifyHole x rhs = do + occurs x rhs >>= \case + True -> error $ "[INTERNAL ERROR] Occurs check failed for " <> show x <> " and " <> show rhs + False -> state $ \st -> + ((), updateDefs (instantiateTy x rhs (defs st)) st) + +-- | Assume the types are head-neutral (i.e. resolved holes have been substituted in if the hole is the outer expression) +unifyNu :: Loc -> Ty -> Ty -> UnifyM [UnificationProblem] +unifyNu loc (Fun a b) (Fun a' b') = pure [UnificationProblem loc a a', UnificationProblem loc b b'] +unifyNu loc (InstantVector a) (InstantVector a') = pure [UnificationProblem loc a a'] +unifyNu loc (RangeVector a) (RangeVector a') = pure [UnificationProblem loc a a'] +unifyNu _ Scalar Scalar = pure [] +unifyNu _ Bool Bool = pure [] +unifyNu _ Text Text = pure [] +unifyNu _ Timestamp Timestamp = pure [] +unifyNu _ Duration Duration = pure [] +unifyNu loc (Pair a b) (Pair a' b') = pure [UnificationProblem loc a a', UnificationProblem loc b b'] +unifyNu _ (Hole x) (Hole y) | x == y = pure [] +unifyNu _ (Hole x) ty = [] <$ unifyHole x ty +unifyNu _ ty (Hole y) = [] <$ unifyHole y ty +unifyNu loc lhs rhs = + throwError $ "Can't solve unification constraint: " + <> prettyTy Loose lhs + <> " = " + <> prettyTy Loose rhs + <> " @ " + <> pack (sourcePosPretty loc) + +-- | Check if the given hole identifier occurs in the given 
type. +occurs :: HoleIdentifier -> Ty -> UnifyM Bool +occurs x ty = do + ds <- defs <$> get + occursNu x (resolveTy ds ty) + +-- | `Ty` is assumed to be normal w.r.t. hole substitution. +occursNu :: HoleIdentifier -> Ty -> UnifyM Bool +occursNu x (InstantVector ty) = occursNu x ty +occursNu x (RangeVector ty) = occursNu x ty +occursNu x (Fun ty ty') = (||) <$> occursNu x ty <*> occursNu x ty' +occursNu x (Pair ty ty') = (||) <$> occursNu x ty <*> occursNu x ty' +occursNu _ Scalar = pure False +occursNu _ Bool = pure False +occursNu _ Timestamp = pure False +occursNu _ Duration = pure False +occursNu _ Text = pure False +occursNu x (Hole x') = pure (x == x') + +unify :: Loc -> Ty -> Ty -> UnifyM [UnificationProblem] +unify loc lhs rhs = do + ds <- defs <$> get + unifyNu loc (resolveTy ds lhs) (resolveTy ds rhs) + +-- | Solve the list of unification problems, instantiating holes in the process. +-- If a problem doesn't have a (unique) solution, throw an error. +solve :: [UnificationProblem] -> UnifyM () +solve [] = pure () +solve (UnificationProblem loc lhs rhs : rest) = do + new <- unify loc lhs rhs + solve (new ++ rest) diff --git a/bench/cardano-timeseries-io/src/Cardano/Timeseries/Util.hs b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Util.hs new file mode 100644 index 00000000000..9cad82f8eca --- /dev/null +++ b/bench/cardano-timeseries-io/src/Cardano/Timeseries/Util.hs @@ -0,0 +1,23 @@ +module Cardano.Timeseries.Util(toMaybe, maybeToEither, range) where + +import Prelude hiding (head) + +import Data.Map.Strict (Map) +import qualified Data.Map.Strict as Map + + +toMaybe :: Bool -> a -> Maybe a +toMaybe False _ = Nothing +toMaybe True x = Just x + +maybeToEither :: e -> Maybe a -> Either e a +maybeToEither _ (Just x) = Right x +maybeToEither err Nothing = Left err + +-- | Return submap containing only keys in (lo, hi). +-- | Complexity: O(log(n)). 
+range :: Ord k => k -> k -> Map k v -> Map k v +range lo hi m = + let (_, m1) = Map.split lo m -- drop all <= lo + (m2, _) = Map.split hi m1 -- drop all >= hi + in m2 diff --git a/cabal.project b/cabal.project index b96b27fbc3c..c2418ba18c5 100644 --- a/cabal.project +++ b/cabal.project @@ -32,6 +32,7 @@ packages: bench/plutus-scripts-bench bench/tx-generator bench/cardano-recon-framework + bench/cardano-timeseries-io trace-dispatcher trace-resources trace-forward @@ -77,3 +78,10 @@ if impl (ghc >= 9.12) -- Do NOT add more source-repository-package stanzas here unless they are strictly -- temporary! Please read the section in CONTRIBUTING about updating dependencies. +source-repository-package + type: git + location: https://github.com/input-output-hk/ekg-forward.git + tag: c72c9a29045431df7484b665bed33c12ea71d0ac + --sha256: sha256-b87qt8RMI4gNPF8QTrRjfS5KK2/JhbxUp5ijscn2Vf8= + subdir: + . diff --git a/cardano-tracer/cardano-tracer.cabal b/cardano-tracer/cardano-tracer.cabal index 33f5cd51571..405b9f5223e 100644 --- a/cardano-tracer/cardano-tracer.cabal +++ b/cardano-tracer/cardano-tracer.cabal @@ -123,6 +123,7 @@ library Cardano.Tracer.Handlers.Metrics.Monitoring Cardano.Tracer.Handlers.Metrics.Prometheus Cardano.Tracer.Handlers.Metrics.Servers + Cardano.Tracer.Handlers.Metrics.TimeseriesServer Cardano.Tracer.Handlers.Metrics.Utils Cardano.Tracer.Handlers.Notifications.Check @@ -145,8 +146,14 @@ library Cardano.Tracer.Types Cardano.Tracer.Utils + Cardano.Tracer.Timeseries + Cardano.Tracer.Timeseries.Trace + Cardano.Tracer.Timeseries.Types + other-modules: Cardano.Tracer.Handlers.Logs.Journal.NoSystemd Cardano.Tracer.Handlers.Notifications.Timer + Cardano.Tracer.Time + Paths_cardano_tracer autogen-modules: Paths_cardano_tracer @@ -202,6 +209,7 @@ library , warp ^>= 3.4 , warp-tls , yaml + , cardano-timeseries-io if os(windows) build-depends: Win32 diff --git a/cardano-tracer/src/Cardano/Tracer/Acceptors/Client.hs
b/cardano-tracer/src/Cardano/Tracer/Acceptors/Client.hs index 3211e877808..d40ad44cad6 100644 --- a/cardano-tracer/src/Cardano/Tracer/Acceptors/Client.hs +++ b/cardano-tracer/src/Cardano/Tracer/Acceptors/Client.hs @@ -1,4 +1,6 @@ {-# LANGUAGE DataKinds #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} module Cardano.Tracer.Acceptors.Client ( runAcceptorsClient @@ -41,7 +43,7 @@ import Data.Word (Word32) import qualified Network.Mux as Mux import qualified Network.Socket as Socket import qualified System.Metrics.Configuration as EKGF -import System.Metrics.Network.Acceptor (acceptEKGMetricsInit) +import System.Metrics.Network.Acceptor (acceptMetricsInit) import qualified Trace.Forward.Configuration.DataPoint as DPF import qualified Trace.Forward.Configuration.TraceObject as TF @@ -192,10 +194,11 @@ runEKGAcceptorInit respoinderCtx LBS.ByteString IO () Void runEKGAcceptorInit tracerEnv ekgConfig errorHandler = - acceptEKGMetricsInit + acceptMetricsInit ekgConfig (prepareMetricsStores tracerEnv . micConnectionId) - (errorHandler . micConnectionId) + (store tracerEnv . connIdToNodeId . micConnectionId) + (errorHandler . micConnectionId) where runTraceObjectsAcceptorInit :: Show addr diff --git a/cardano-tracer/src/Cardano/Tracer/Acceptors/Server.hs b/cardano-tracer/src/Cardano/Tracer/Acceptors/Server.hs index 37c0470c7e2..a05d900fd5b 100644 --- a/cardano-tracer/src/Cardano/Tracer/Acceptors/Server.hs +++ b/cardano-tracer/src/Cardano/Tracer/Acceptors/Server.hs @@ -5,8 +5,6 @@ module Cardano.Tracer.Acceptors.Server ( runAcceptorsServer ) where -import "contra-tracer" Control.Tracer (nullTracer) - import Cardano.Logging (TraceObject) import qualified Cardano.Logging.Types as Net import Cardano.Tracer.Acceptors.Utils @@ -25,25 +23,25 @@ import Ouroboros.Network.Mux (MiniProtocol (..), MiniProtocolLimits (. 
miniProtocolNum, miniProtocolRun) import Ouroboros.Network.Protocol.Handshake (Handshake, HandshakeArguments (..)) import qualified Ouroboros.Network.Protocol.Handshake as Handshake +import qualified Ouroboros.Network.Server.Simple as Server import Ouroboros.Network.Snocket (LocalAddress, LocalSocket, Snocket, localAddressFromPath, localSnocket, makeLocalBearer, makeSocketBearer, socketSnocket) -import Ouroboros.Network.Socket (ConnectionId (..), - SomeResponderApplication (..)) -import qualified Ouroboros.Network.Server.Simple as Server +import Ouroboros.Network.Socket (ConnectionId (..), SomeResponderApplication (..)) import Codec.CBOR.Term (Term) import Control.Concurrent.Async (wait) +import "contra-tracer" Control.Tracer (nullTracer) import qualified Data.ByteString.Lazy as LBS +import Data.Functor (void) import Data.List.NonEmpty (NonEmpty ((:|))) import qualified Data.Text as Text -import Data.Functor (void) import Data.Void (Void) import Data.Word (Word32) import qualified Network.Mux as Mux import qualified Network.Socket as Socket import qualified System.Metrics.Configuration as EKGF -import System.Metrics.Network.Acceptor (acceptEKGMetricsResp) +import System.Metrics.Network.Acceptor (acceptMetricsResp) import qualified Trace.Forward.Configuration.DataPoint as DPF import qualified Trace.Forward.Configuration.TraceObject as TF @@ -180,9 +178,10 @@ runEKGAcceptor -> (ConnectionId addr -> IO ()) -> RunMiniProtocol 'Mux.ResponderMode initiatorCtx (ResponderContext addr) LBS.ByteString IO Void () runEKGAcceptor tracerEnv ekgConfig errorHandler = - acceptEKGMetricsResp + acceptMetricsResp ekgConfig (prepareMetricsStores tracerEnv . rcConnectionId) + (store tracerEnv . connIdToNodeId . rcConnectionId) (errorHandler . 
rcConnectionId) runTraceObjectsAcceptor diff --git a/cardano-tracer/src/Cardano/Tracer/Acceptors/Utils.hs b/cardano-tracer/src/Cardano/Tracer/Acceptors/Utils.hs index 79989e4a9d1..7f0994b1307 100644 --- a/cardano-tracer/src/Cardano/Tracer/Acceptors/Utils.hs +++ b/cardano-tracer/src/Cardano/Tracer/Acceptors/Utils.hs @@ -3,12 +3,14 @@ {-# LANGUAGE TupleSections #-} {-# OPTIONS_GHC -Wno-redundant-constraints #-} +{-# LANGUAGE ViewPatterns #-} module Cardano.Tracer.Acceptors.Utils ( prepareDataPointRequestor , prepareMetricsStores , removeDisconnectedNode , notifyAboutNodeDisconnected + , store ) where #if RTVIEW @@ -26,14 +28,19 @@ import Control.Concurrent.STM.TVar (TVar, modifyTVar', newTVarIO) import qualified Data.Bimap as BM import qualified Data.Map.Strict as M import qualified Data.Set as S -import Data.Time.Clock.POSIX (getPOSIXTime) #if RTVIEW import Data.Time.Clock.System (getSystemTime, systemToUTCTime) #endif import qualified System.Metrics as EKG -import System.Metrics.Store.Acceptor (MetricsLocalStore, emptyMetricsLocalStore) +import System.Metrics.Store.Acceptor (MetricsLocalStore, emptyMetricsLocalStore, storeMetrics) import Trace.Forward.Utils.DataPoint (DataPointRequestor, initDataPointRequestor) +import System.Metrics.ReqResp +import Cardano.Timeseries.Domain.Types (MetricIdentifier) +import Data.Foldable +import qualified Cardano.Tracer.Timeseries as Timeseries +import Data.Maybe (mapMaybe) +import Cardano.Tracer.Time (getTimeMs) prepareDataPointRequestor :: Show addr @@ -54,10 +61,10 @@ prepareMetricsStores -> IO (EKG.Store, TVar MetricsLocalStore) prepareMetricsStores TracerEnv{teConnectedNodes, teAcceptedMetrics} connId = do addConnectedNode teConnectedNodes connId - store <- EKG.newStore + st <- EKG.newStore - EKG.registerCounter "ekg.server_timestamp_ms" getTimeMs store - storesForNewNode <- (store ,) <$> newTVarIO emptyMetricsLocalStore + EKG.registerCounter "ekg.server_timestamp_ms" getTimeMs st + storesForNewNode <- (st ,) <$> newTVarIO 
emptyMetricsLocalStore atomically do modifyTVar' teAcceptedMetrics do @@ -65,15 +72,6 @@ prepareMetricsStores TracerEnv{teConnectedNodes, teAcceptedMetrics} connId = do return storesForNewNode - where - -- forkServer definition of `getTimeMs'. The ekg frontend relies - -- on the "ekg.server_timestamp_ms" metric being in every - -- store. While forkServer adds that that automatically we must - -- manually add it. - -- url - -- + https://github.com/tvh/ekg-wai/blob/master/System/Remote/Monitoring/Wai.hs#L237-L238 - getTimeMs = (round . (* 1000)) `fmap` getPOSIXTime - addConnectedNode :: Show addr => ConnectedNodes @@ -115,3 +113,20 @@ notifyAboutNodeDisconnected TracerEnvRTView{teEventsQueues} connId = do #else notifyAboutNodeDisconnected _ _ = pure () #endif + +store :: TracerEnv -> NodeId -> (EKG.Store, TVar MetricsLocalStore) -> Response -> IO () +store tracerEnv nodeId (ekgStore, localStore) resp@(ResponseMetrics ms) = do + storeMetrics resp ekgStore localStore + ts <- getTimeMs + for_ (teTimeseriesHandle tracerEnv) $ \h -> Timeseries.insert h nodeId (fromIntegral ts) (mapMaybe parseMetric ms) + + where + numeralOnly :: MetricValue -> Maybe Double + numeralOnly (GaugeValue x) = Just (fromIntegral x) + numeralOnly (CounterValue x) = Just (fromIntegral x) + numeralOnly (LabelValue _) = Nothing + + parseMetric :: (MetricName, MetricValue) -> Maybe (MetricIdentifier, Double) + parseMetric (k, numeralOnly -> Just v) = Just (k, v) + parseMetric _ = Nothing + diff --git a/cardano-tracer/src/Cardano/Tracer/Configuration.hs b/cardano-tracer/src/Cardano/Tracer/Configuration.hs index 75d6f09932e..daee2dbc080 100644 --- a/cardano-tracer/src/Cardano/Tracer/Configuration.hs +++ b/cardano-tracer/src/Cardano/Tracer/Configuration.hs @@ -164,6 +164,7 @@ data TracerConfig = TracerConfig , hasEKG :: !(Maybe Endpoint) -- ^ Endpoint for EKG web-page. , hasPrometheus :: !(Maybe Endpoint) -- ^ Endpoint for Prometheus web-page. 
, hasRTView :: !(Maybe Endpoint) -- ^ Endpoint for RTView web-page. + , hasTimeseries :: !(Maybe Endpoint) , tlsCertificate :: !(Maybe Certificate) -- | Socket for tracer's to reforward on. Second member of the triplet is the list of prefixes to reforward. -- Third member of the triplet is the forwarder config. diff --git a/cardano-tracer/src/Cardano/Tracer/Environment.hs b/cardano-tracer/src/Cardano/Tracer/Environment.hs index 3daf1d0f4d3..ee5241b73f4 100644 --- a/cardano-tracer/src/Cardano/Tracer/Environment.hs +++ b/cardano-tracer/src/Cardano/Tracer/Environment.hs @@ -15,6 +15,7 @@ import Cardano.Tracer.Handlers.State.TraceObjects #endif import Cardano.Tracer.MetaTrace import Cardano.Tracer.Types +import Cardano.Tracer.Timeseries (TimeseriesHandle) import Control.Concurrent.Extra (Lock) import Data.Text (Text) @@ -36,6 +37,7 @@ data TracerEnv = TracerEnv , teRegistry :: !HandleRegistry , teStateDir :: !(Maybe FilePath) , teMetricsHelp :: ![(Text, Builder)] + , teTimeseriesHandle :: !(Maybe TimeseriesHandle) } #if RTVIEW diff --git a/cardano-tracer/src/Cardano/Tracer/Handlers/Metrics/Servers.hs b/cardano-tracer/src/Cardano/Tracer/Handlers/Metrics/Servers.hs index 8425350a635..31398659024 100644 --- a/cardano-tracer/src/Cardano/Tracer/Handlers/Metrics/Servers.hs +++ b/cardano-tracer/src/Cardano/Tracer/Handlers/Metrics/Servers.hs @@ -1,6 +1,6 @@ -{-# LANGUAGE NumericUnderscores #-} -{-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE CPP #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE NumericUnderscores #-} module Cardano.Tracer.Handlers.Metrics.Servers ( runMetricsServers @@ -10,17 +10,19 @@ import Cardano.Tracer.Configuration import Cardano.Tracer.Environment import Cardano.Tracer.Handlers.Metrics.Monitoring import Cardano.Tracer.Handlers.Metrics.Prometheus +import Cardano.Tracer.Handlers.Metrics.TimeseriesServer (runTimeseriesServer) import qualified Cardano.Tracer.Handlers.Metrics.Utils as Utils import Cardano.Tracer.Utils (sequenceConcurrently_) import 
Control.AutoUpdate -import Data.Maybe (catMaybes) import Control.Monad (unless) +import Data.Maybe (catMaybes) -- | Runs metrics servers if needed: -- -- 1. Prometheus exporter. -- 2. EKG monitoring web-page. +-- 3. Timeseries query server. -- runMetricsServers :: TracerEnv @@ -44,8 +46,11 @@ runMetricsServers tracerEnv = do servers = catMaybes [ runPrometheusServer tracerEnv <$> hasPrometheus , runMonitoringServer tracerEnv <$> hasEKG + , const <$> (runTimeseriesServer teTracer cfg <$> hasTimeseries <*> teTimeseriesHandle) ] TracerEnv - { teConfig = TracerConfig { hasPrometheus, hasEKG } + { teConfig = cfg@TracerConfig { hasPrometheus, hasEKG, hasTimeseries }, + teTracer, + teTimeseriesHandle } = tracerEnv diff --git a/cardano-tracer/src/Cardano/Tracer/Handlers/Metrics/TimeseriesServer.hs b/cardano-tracer/src/Cardano/Tracer/Handlers/Metrics/TimeseriesServer.hs new file mode 100644 index 00000000000..a71f9faeb12 --- /dev/null +++ b/cardano-tracer/src/Cardano/Tracer/Handlers/Metrics/TimeseriesServer.hs @@ -0,0 +1,173 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} + +module Cardano.Tracer.Handlers.Metrics.TimeseriesServer(runTimeseriesServer) where +import Cardano.Timeseries.AsText +import Cardano.Tracer.Configuration (Certificate (..), Endpoint, TracerConfig (..), + epForceSSL, setEndpoint) +import Cardano.Tracer.Handlers.Metrics.Utils (contentHdrUtf8Text) +import Cardano.Tracer.MetaTrace +import Cardano.Tracer.Timeseries + +import Control.Monad (guard) +import qualified Data.ByteString.Lazy as BL +import Data.Maybe (fromMaybe) +import Data.Text (Text) +import qualified Data.Text as Text +import Data.Text.Encoding (decodeUtf8Lenient) +import qualified Data.Text.Encoding as T +import Network.HTTP.Types +import Network.Wai +import Network.Wai.Handler.Warp hiding (run) +import Network.Wai.Handler.WarpTLS +import 
System.Time.Extra (sleep) + +-- | POST timeseries/query +parsePostTimeseriesQuery :: Request -> Maybe () +parsePostTimeseriesQuery request = do + guard (request.pathInfo == ["timeseries", "query"]) + guard (request.requestMethod == methodPost) + +-- | POST timeseries/prune +parsePostTimeseriesPrune :: Request -> Maybe () +parsePostTimeseriesPrune request = do + guard (request.pathInfo == ["timeseries", "prune"]) + guard (request.requestMethod == methodPost) + +-- | POST timeseries/config/retention +parsePostTimeseriesConfigRetention :: Request -> Maybe () +parsePostTimeseriesConfigRetention request = do + guard (request.pathInfo == ["timeseries", "config", "retention"]) + guard (request.requestMethod == methodPost) + +-- | POST timeseries/config/pruning +parsePostTimeseriesConfigPruningPeriod :: Request -> Maybe () +parsePostTimeseriesConfigPruningPeriod request = do + guard (request.pathInfo == ["timeseries", "config", "pruning"]) + guard (request.requestMethod == methodPost) + +-- | GET timeseries/config/retention +parseGetTimeseriesConfigRetention :: Request -> Maybe () +parseGetTimeseriesConfigRetention request = do + guard (request.pathInfo == ["timeseries", "config", "retention"]) + guard (request.requestMethod == methodGet) + +-- | GET timeseries/config/pruning +parseGetTimeseriesConfigPruningPeriod :: Request -> Maybe () +parseGetTimeseriesConfigPruningPeriod request = do + guard (request.pathInfo == ["timeseries", "config", "pruning"]) + guard (request.requestMethod == methodGet) + +ok :: Response +ok = responseLBS status204 [] "" + +malformed :: Response +malformed = responseLBS status400 contentHdrUtf8Text "Malformed input" + +notFound :: Response +notFound = responseLBS status404 [] "" + +expectEmptyQuery :: Request + -> (Response -> IO ResponseReceived) + -> IO ResponseReceived + -> IO ResponseReceived +expectEmptyQuery request send kont = + case queryToQueryText request.queryString of + [] -> kont + _ -> send malformed + +expectOneOptionalItemQuery :: 
Read item + => Request + -> (Response -> IO ResponseReceived) + -> Text + -> (Maybe item -> IO ResponseReceived) + -> IO ResponseReceived +expectOneOptionalItemQuery request send key kont = + case queryToQueryText request.queryString of + [(key', Just !(read . Text.unpack -> Just !v))] | key' == key -> kont (Just v) + [] -> kont Nothing + _ -> send malformed + +expectOneItemQuery :: Read item + => Request + -> (Response -> IO ResponseReceived) + -> Text + -> (item -> IO ResponseReceived) + -> IO ResponseReceived +expectOneItemQuery request send key kont = + case queryToQueryText request.queryString of + [(key', Just !(read . Text.unpack -> Just !v))] | key' == key -> kont v + _ -> send malformed + +encodeUtf8 :: Text -> BL.ByteString +encodeUtf8 = BL.fromStrict . T.encodeUtf8 + +timeseriesApp :: TimeseriesHandle -> Application +timeseriesApp handle request@(parsePostTimeseriesQuery -> Just ()) send = + expectEmptyQuery request send $ do + bs <- consumeRequestBodyStrict request + let query = decodeUtf8Lenient (BL.toStrict bs) + execute handle query >>= \case + Left err -> send $ + responseLBS status400 contentHdrUtf8Text (encodeUtf8 (asText err)) + + Right v -> send $ responseLBS status200 contentHdrUtf8Text (encodeUtf8 (showT v)) +timeseriesApp handle request@(parsePostTimeseriesPrune -> Just ()) send = + expectEmptyQuery request send $ do + prune handle + send ok +timeseriesApp handle request@(parsePostTimeseriesConfigRetention -> Just ()) send = do + expectOneItemQuery request send "value" $ \v -> do + modifyConfig handle (\cfg -> Just cfg{retentionMillis = v}) + send ok +timeseriesApp handle request@(parsePostTimeseriesConfigPruningPeriod -> Just ()) send = do + expectOneOptionalItemQuery request send "value" \v -> do + modifyConfig handle (\cfg -> Just cfg{pruningPeriodSec = v}) + send ok +timeseriesApp handle request@(parseGetTimeseriesConfigRetention -> Just ()) send = + expectEmptyQuery request send $ do + v <- (.retentionMillis) <$> readConfig handle
+ send $ responseLBS status200 contentHdrUtf8Text (encodeUtf8 (showT v)) +timeseriesApp handle request@(parseGetTimeseriesConfigPruningPeriod -> Just ()) send = + expectEmptyQuery request send $ do + v <- (.pruningPeriodSec) <$> readConfig handle + send $ responseLBS status200 contentHdrUtf8Text (encodeUtf8 (showT v)) +timeseriesApp _ _ send = send notFound + +runTimeseriesServer :: Trace IO TracerTrace -> TracerConfig -> Endpoint -> TimeseriesHandle -> IO () +runTimeseriesServer tr tracerConfig endpoint handle = do + + -- Pause to prevent collision between "Listening"-notifications from servers. + sleep 0.1 + + traceWith tr TracerStartedTimeseries + { ttTimeseriesEndpoint = endpoint + } + + + let + settings :: Settings + settings = setEndpoint endpoint defaultSettings + + tls_settings :: Certificate -> TLSSettings + tls_settings Certificate {..} = + tlsSettingsChain certificateFile (fromMaybe [] certificateChain) certificateKeyFile + + application :: Application + application = timeseriesApp handle + + run :: IO () + run | Just True <- epForceSSL endpoint , Just cert <- tlsCertificate tracerConfig + = runTLS (tls_settings cert) settings application + -- Trace, if we expect SSL without getting certificates. 
+ | Just True <- epForceSSL endpoint + = do traceWith tr TracerMissingCertificate + { ttMissingCertificateEndpoint = endpoint } + runSettings settings application + | otherwise + = runSettings settings application + run diff --git a/cardano-tracer/src/Cardano/Tracer/MetaTrace.hs b/cardano-tracer/src/Cardano/Tracer/MetaTrace.hs index b40195fa228..b73238acdbf 100644 --- a/cardano-tracer/src/Cardano/Tracer/MetaTrace.hs +++ b/cardano-tracer/src/Cardano/Tracer/MetaTrace.hs @@ -22,10 +22,12 @@ module Cardano.Tracer.MetaTrace import Cardano.Logging import Cardano.Logging.Resources import Cardano.Tracer.Configuration +import Cardano.Tracer.Timeseries.Trace (TimeseriesTrace) import Cardano.Tracer.Types (NodeId (..), NodeName) import Data.Aeson hiding (Error) import qualified Data.Aeson as AE +import Data.Functor (($>), (<&>)) import qualified Data.Map.Strict as Map import Data.Text as T (Text, pack) import qualified System.IO as Sys @@ -58,6 +60,9 @@ data TracerTrace | TracerStartedPrometheus { ttPrometheusEndpoint :: Endpoint } + | TracerStartedTimeseries + { ttTimeseriesEndpoint :: Endpoint + } | TracerStartedMonitoring { ttMonitoringEndpoint :: Endpoint , ttMonitoringType :: Text @@ -90,6 +95,13 @@ data TracerTrace } deriving Show +-- | A bundle of domain-split tracers used in the application. +data TraceBundle = TraceBundle{ + -- | A tracer used to trace all kinds of things happening in the application. + assorted :: !(Trace IO TracerTrace), + -- | A tracer that has to do only with timeseries storing/querying/pruning etc. 
+ timeseries :: !(Trace IO TimeseriesTrace) +} instance LogFormatting TracerTrace where forHuman t@TracerConfigIs{ttWarnRTViewMissing = True} = @@ -136,6 +148,10 @@ instance LogFormatting TracerTrace where [ "kind" .= AE.String "TracerStartedPrometheus" , "endpoint" .= ttPrometheusEndpoint ] + TracerStartedTimeseries{..} -> mconcat + [ "kind" .= AE.String "TracerStartedTimeseries" + , "endpoint" .= ttTimeseriesEndpoint + ] TracerStartedMonitoring{..} -> mconcat [ "kind" .= AE.String "TracerStartedMonitoring" , "endpoint" .= ttMonitoringEndpoint @@ -202,6 +218,7 @@ instance MetaTrace TracerTrace where namespaceFor TracerAddNewNodeIdMapping {} = Namespace [] ["AddNewNodeIdMapping"] namespaceFor TracerStartedLogRotator = Namespace [] ["StartedLogRotator"] namespaceFor TracerStartedPrometheus{} = Namespace [] ["StartedPrometheus"] + namespaceFor TracerStartedTimeseries{} = Namespace [] ["StartedTimeseries"] namespaceFor TracerStartedMonitoring{} = Namespace [] ["StartedMonitoring"] namespaceFor TracerStartedAcceptors {} = Namespace [] ["StartedAcceptors"] namespaceFor TracerStartedRTView = Namespace [] ["StartedRTView"] @@ -227,6 +244,7 @@ instance MetaTrace TracerTrace where severityFor (Namespace _ ["AddNewNodeIdMapping"]) _ = Just Info severityFor (Namespace _ ["StartedLogRotator"]) _ = Just Info severityFor (Namespace _ ["StartedPrometheus"]) _ = Just Info + severityFor (Namespace _ ["StartedTimeseries"]) _ = Just Info severityFor (Namespace _ ["StartedMonitoring"]) _ = Just Info severityFor (Namespace _ ["StartedAcceptors"]) _ = Just Info severityFor (Namespace _ ["StartedRTView"]) _ = Just Info @@ -256,6 +274,7 @@ instance MetaTrace TracerTrace where , Namespace [] ["AddNewNodeIdMapping"] , Namespace [] ["StartedLogRotator"] , Namespace [] ["StartedPrometheus"] + , Namespace [] ["StartedTimeseries"] , Namespace [] ["StartedMonitoring"] , Namespace [] ["StartedAcceptors"] , Namespace [] ["StartedRTView"] @@ -273,34 +292,56 @@ instance MetaTrace TracerTrace where
, Namespace [] ["ForwardingInterrupted"] ] -stderrShowTracer :: Trace IO TracerTrace +stderrShowTracer :: Show a => Trace IO a stderrShowTracer = contramapM' (either (const $ pure ()) (Sys.hPrint Sys.stderr) . snd) -mkTracerTracer :: SeverityF -> IO (Trace IO TracerTrace) -mkTracerTracer defSeverity = do - standardTracer - >>= machineFormatter +mkTracerTracer :: Trace IO FormattedMessage -> SeverityF -> IO (Trace IO TracerTrace) +mkTracerTracer std defSeverity = + machineFormatter std >>= filterSeverityFromConfig >>= \t -> let finalTracer = withNames ["Tracer"] (withSeverity t) - in configTracerTracer defSeverity finalTracer >> pure finalTracer + in configTracerTracer finalTracer $> finalTracer + where + configTracerTracer :: Trace IO TracerTrace -> IO () + configTracerTracer tr = do + configReflection <- emptyConfigReflection + configureTracers configReflection initialTraceConfig [tr] + where + initialTraceConfig :: TraceConfig + initialTraceConfig = + TraceConfig + { tcForwarder = Nothing + , tcNodeName = Nothing + , tcResourceFrequency = Nothing + , tcLedgerMetricsFrequency = Nothing + , tcMetricsPrefix = Nothing + , tcOptions = Map.fromList + [ ([], [ConfSeverity defSeverity]) + , (["Tracer"], [ConfDetail DMaximum]) + ] + } -configTracerTracer :: SeverityF -> Trace IO TracerTrace -> IO () -configTracerTracer defSeverity tr = do - configReflection <- emptyConfigReflection - configureTracers configReflection initialTraceConfig [tr] +mkTimeseriesTracer :: Trace IO FormattedMessage -> IO (Trace IO TimeseriesTrace) +mkTimeseriesTracer std = do + tr <- machineFormatter std >>= filterSeverityFromConfig <&> withNames ["Tracer"] . 
withSeverity + configReflection <- emptyConfigReflection + configureTracers configReflection cfg [tr] + pure tr where - initialTraceConfig :: TraceConfig - initialTraceConfig = + cfg :: TraceConfig + cfg = TraceConfig - { tcForwarder = Nothing - , tcNodeName = Nothing - , tcResourceFrequency = Nothing + { tcForwarder = Nothing + , tcNodeName = Nothing + , tcResourceFrequency = Nothing , tcLedgerMetricsFrequency = Nothing - , tcMetricsPrefix = Nothing - , tcOptions = Map.fromList - [ ([], [ConfSeverity defSeverity]) - , (["Tracer"], [ConfDetail DMaximum]) - ] + , tcMetricsPrefix = Nothing + , tcOptions = Map.fromList [([], [ConfSeverity (SeverityF (Just Info))])] } + +mkTraceBundle :: SeverityF -> IO TraceBundle +mkTraceBundle sev = do + std <- standardTracer + TraceBundle <$> mkTracerTracer std sev <*> mkTimeseriesTracer std diff --git a/cardano-tracer/src/Cardano/Tracer/Run.hs b/cardano-tracer/src/Cardano/Tracer/Run.hs index 712ce1224ea..a4052565125 100644 --- a/cardano-tracer/src/Cardano/Tracer/Run.hs +++ b/cardano-tracer/src/Cardano/Tracer/Run.hs @@ -2,6 +2,8 @@ {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE NumericUnderscores #-} +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE OverloadedRecordDot #-} -- | This top-level module is used by 'cardano-tracer' app. module Cardano.Tracer.Run @@ -37,31 +39,33 @@ import Control.Exception (SomeException, try) import Control.Monad import Data.Aeson (decodeFileStrict') import Data.Foldable (for_) +import Data.Traversable (for) import Data.Maybe (fromMaybe) import qualified Data.Map.Strict as M (Map, empty, filter, toList) import Data.Text as T (Text, null) import Data.Text.Lazy.Builder as TB (Builder, fromText) +import qualified Cardano.Tracer.Timeseries as Timeseries -- | Top-level run function, called by 'cardano-tracer' app. 
runCardanoTracer :: TracerParams -> IO () runCardanoTracer TracerParams{tracerConfig, stateDir, logSeverity} = do - tr <- mkTracerTracer $ SeverityF $ logSeverity <|> Just Info -- default severity filter to Info - traceWith tr TracerBuildInfo + tr <- mkTraceBundle $ SeverityF $ logSeverity <|> Just Info -- default severity filter to Info + traceWith tr.assorted TracerBuildInfo #if RTVIEW { ttBuiltWithRTView = True #else { ttBuiltWithRTView = False #endif } - traceWith tr TracerParamsAre + traceWith tr.assorted TracerParamsAre { ttConfigPath = tracerConfig , ttStateDir = stateDir , ttMinLogSeverity = logSeverity } config <- readTracerConfig tracerConfig - traceWith tr TracerConfigIs + traceWith tr.assorted TracerConfigIs { ttConfig = config #if RTVIEW , ttWarnRTViewMissing = False @@ -75,7 +79,7 @@ runCardanoTracer TracerParams{tracerConfig, stateDir, logSeverity} = do forever do mbrs <- readResourceStats for_ mbrs \resourceStat -> - traceWith tr (TracerResource resourceStat) + traceWith tr.assorted (TracerResource resourceStat) threadDelay (1_000 * msInterval) -- Delay in seconds, given milliseconds link threadId @@ -87,12 +91,12 @@ runCardanoTracer TracerParams{tracerConfig, stateDir, logSeverity} = do doRunCardanoTracer :: TracerConfig -- ^ Tracer's configuration. -> Maybe FilePath -- ^ Path to RTView's internal state files. - -> Trace IO TracerTrace + -> TraceBundle -> ProtocolsBrake -- ^ The flag we use to stop all the protocols. -> DataPointRequestors -- ^ The DataPointRequestors to ask 'DataPoint's. 
-> IO () doRunCardanoTracer config rtViewStateDir tr protocolsBrake dpRequestors = do - traceWith tr TracerInitStarted + traceWith tr.assorted TracerInitStarted connectedNodes <- initConnectedNodes connectedNodesNames <- initConnectedNodesNames acceptedMetrics <- initAcceptedMetrics @@ -109,16 +113,18 @@ doRunCardanoTracer config rtViewStateDir tr protocolsBrake dpRequestors = do currentLogLock <- newLock currentDPLock <- newLock - traceWith tr TracerInitEventQueues + traceWith tr.assorted TracerInitEventQueues #if RTVIEW eventsQueues <- initEventsQueues tr rtViewStateDir connectedNodesNames dpRequestors currentDPLock rtViewPageOpened <- newTVarIO False #endif - (reforwardTraceObject,_trDataPoint) <- initReForwarder config tr + (reforwardTraceObject,_trDataPoint) <- initReForwarder config tr.assorted registry <- newRegistry + !timeseriesHandle <- for (hasTimeseries config) (const $ Timeseries.create tr.timeseries Nothing) + -- Environment for all following functions. let tracerEnv :: TracerEnv tracerEnv = TracerEnv @@ -130,11 +136,12 @@ doRunCardanoTracer config rtViewStateDir tr protocolsBrake dpRequestors = do , teCurrentDPLock = currentDPLock , teDPRequestors = dpRequestors , teProtocolsBrake = protocolsBrake - , teTracer = tr + , teTracer = tr.assorted , teReforwardTraceObjects = reforwardTraceObject , teRegistry = registry , teStateDir = rtViewStateDir , teMetricsHelp = mHelp + , teTimeseriesHandle = timeseriesHandle } tracerEnvRTView :: TracerEnvRTView @@ -151,15 +158,15 @@ doRunCardanoTracer config rtViewStateDir tr protocolsBrake dpRequestors = do -- Specify what should be done before 'cardano-tracer' stops. 
beforeProgramStops $ do - traceWith tr TracerShutdownInitiated + traceWith tr.assorted TracerShutdownInitiated #if RTVIEW backupAllHistory tracerEnv tracerEnvRTView traceWith tr TracerShutdownHistBackup #endif applyBrake (teProtocolsBrake tracerEnv) - traceWith tr TracerShutdownComplete + traceWith tr.assorted TracerShutdownComplete - traceWith tr TracerInitDone + traceWith tr.assorted TracerInitDone sequenceConcurrently_ [ runLogsRotator tracerEnv , runMetricsServers tracerEnv diff --git a/cardano-tracer/src/Cardano/Tracer/Time.hs b/cardano-tracer/src/Cardano/Tracer/Time.hs new file mode 100644 index 00000000000..95d2078f657 --- /dev/null +++ b/cardano-tracer/src/Cardano/Tracer/Time.hs @@ -0,0 +1,12 @@ +module Cardano.Tracer.Time(getTimeMs) where +import Data.Int (Int64) +import Data.Time.Clock.POSIX (getPOSIXTime) + +-- forkServer definition of `getTimeMs'. The ekg frontend relies +-- on the "ekg.server_timestamp_ms" metric being in every +-- store. While forkServer adds that that automatically we must +-- manually add it. +-- url +-- + https://github.com/tvh/ekg-wai/blob/master/System/Remote/Monitoring/Wai.hs#L237-L238 +getTimeMs :: IO Int64 +getTimeMs = (round . (* 1000)) `fmap` getPOSIXTime diff --git a/cardano-tracer/src/Cardano/Tracer/Timeseries.hs b/cardano-tracer/src/Cardano/Tracer/Timeseries.hs new file mode 100644 index 00000000000..e3be3203ac1 --- /dev/null +++ b/cardano-tracer/src/Cardano/Tracer/Timeseries.hs @@ -0,0 +1,142 @@ +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +{-# HLINT ignore "Use newtype instead of data" #-} +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE NumericUnderscores #-} +module Cardano.Tracer.Timeseries( + TimeseriesConfig(..) 
+ , TimeseriesHandle + , QueryId + , create + , modifyConfig + , readConfig + , writeConfig + , insert + , execute + , prune) where +import Cardano.Logging (Trace, traceWith) +import Cardano.Timeseries.Domain.Instant (Instant (..)) +import Cardano.Timeseries.Domain.Types (MetricIdentifier, Timestamp) +import Cardano.Timeseries.Interface (ExecutionError) +import qualified Cardano.Timeseries.Interface as Interface +import qualified Cardano.Timeseries.Interp.Config as Interp (Config (..)) +import Cardano.Timeseries.Interp.Value (Value) +import qualified Cardano.Timeseries.Store as Store +import Cardano.Timeseries.Store.Tree (Tree) +import Cardano.Tracer.Time (getTimeMs) +import Cardano.Tracer.Timeseries.Trace (TimeseriesTrace (..)) +import Cardano.Tracer.Timeseries.Types +import Cardano.Tracer.Types (NodeId (..)) + +import Prelude hiding (Foldable (..)) + +import Control.Arrow (second) +import Control.Concurrent (MVar, forkIO, newEmptyMVar, putMVar, takeMVar, threadDelay) +import Control.Concurrent.Async (race_) +import Control.Concurrent.STM (TVar, modifyTVar', newTVarIO, readTVar, readTVarIO, + stateTVar) +import Control.Monad (forever) +import Control.Monad.STM (atomically) +import Data.Foldable (Foldable (..)) +import Data.Maybe (fromMaybe) +import qualified Data.Set as Set +import Data.Text (Text) +import GHC.Conc (labelThread) + +-- | Not exported. The user gets the default if `create`-d with a `Nothing` +defaultTimeseriesInterpConfig :: Interp.Config +defaultTimeseriesInterpConfig = Interp.Config + (15 * 1000) -- 15 s + +-- | Not exported. The user gets the default if `create`-d with a `Nothing` +defaultTimeseriesConfig :: TimeseriesConfig +defaultTimeseriesConfig = TimeseriesConfig + (1 * 24 * 60 * 60 * 1000) -- 1 day in ms + (Just (1 * 60 * 60)) -- 1 hour in s + defaultTimeseriesInterpConfig + +-- | The constructor is not exported. Use the API methods below for working with timeseries. 
+data TimeseriesHandle = TimeseriesHandle { + cfg :: !(TVar TimeseriesConfig), + store :: !(TVar (Tree Double)), + reconfigured :: !(MVar ()), + tracer :: !(Trace IO TimeseriesTrace), + nextQueryId :: !(TVar QueryId) +} + +-- Create an opaque "handle" for a timeseries store, given an optional configuration. +-- Also spawns a "pruner" thread (if specified in the config) that periodically prunes the store of expired entries. +create :: Trace IO TimeseriesTrace -> Maybe TimeseriesConfig -> IO TimeseriesHandle +create tr mbCfg = do + cfg_ <- newTVarIO (fromMaybe defaultTimeseriesConfig mbCfg) + st <- newTVarIO Store.new + qid <- newTVarIO 0 + rec <- newEmptyMVar + let handle = TimeseriesHandle cfg_ st rec tr qid + prunerThread <- forkIO (runPruner handle) + labelThread prunerThread "timeseries-pruner-thread" + traceWith tr (TimeseriesTraceCreate mbCfg) + pure handle + where + runPruner :: TimeseriesHandle -> IO () + runPruner handle = forever $ do + cfg_ <- readTVarIO handle.cfg + case cfg_.pruningPeriodSec of + Nothing -> + -- If the current configuration doesn't specify a pruning period, we block + -- the thread until a reconfiguration happens. + takeMVar handle.reconfigured + Just period -> do + prune handle + -- Wait for the given period or wake up on a reconfiguration. + race_ + (threadDelay (fromIntegral period * 1_000_000)) + (takeMVar handle.reconfigured) + +-- COMMENT: (@russoul) do we have a good place for this function (named after the diagonal functor)? +diag :: a -> (a, a) +diag x = (x, x) + +-- | Reconfigure the store. The new parameters are applied immediately. +-- Wakes up the pruner thread as well. +-- If the supplied config is empty, reconfigures back to the default config. +modifyConfig :: TimeseriesHandle -> (TimeseriesConfig -> Maybe TimeseriesConfig) -> IO () +modifyConfig handle cfg_ = do + newCfg <- atomically $ stateTVar handle.cfg (second (fromMaybe defaultTimeseriesConfig) . diag . 
cfg_) + putMVar handle.reconfigured () + traceWith handle.tracer (TimeseriesTraceReconfigure newCfg) + +readConfig :: TimeseriesHandle -> IO TimeseriesConfig +readConfig handle = readTVarIO handle.cfg + +writeConfig :: TimeseriesHandle -> Maybe TimeseriesConfig -> IO () +writeConfig handle k = modifyConfig handle (const k) + +-- | Insert a batch on metric data into the store at the given timestamp. +insert :: TimeseriesHandle -> NodeId -> Timestamp -> [(MetricIdentifier, Double)] -> IO () +insert handle nodeId t batch = do + atomically $ modifyTVar' handle.store $ \st -> foldl' f st batch + traceWith handle.tracer (TimeseriesTraceInsert nodeId t batch) + where + f st (k, v) = Store.insert st k (Instant (Set.singleton ("node_id", nodeId.text)) t v) + +-- | Execute a query on the store. +execute :: TimeseriesHandle -> Text -> IO (Either ExecutionError Value) +execute handle stringQuery = do + (theCfg, theStore) <- atomically $ (,) <$> readTVar handle.cfg <*> readTVar handle.store + t <- getTimeMs + queryId <- atomically $ readTVar handle.nextQueryId <* modifyTVar' handle.nextQueryId (+ 1) + traceWith handle.tracer (TimeseriesTraceIssueExecute queryId stringQuery) + let !result = Interface.execute theStore theCfg.interpCfg (fromIntegral t) stringQuery + traceWith handle.tracer (TimeseriesTraceYieldExecute queryId result) + pure result + +-- | Prune the store of the entries past the configured retention period (relative to "now"). +-- The pruner thread uses that function internally, but one can still invoke the pruning manually at will. 
+prune :: TimeseriesHandle -> IO () +prune handle = do + t <- getTimeMs + theCfg <- readTVarIO handle.cfg + atomically $ modifyTVar' handle.store (\s -> Store.truncate s (fromIntegral t - theCfg.retentionMillis)) + traceWith handle.tracer (TimeseriesTracePrune theCfg.retentionMillis) diff --git a/cardano-tracer/src/Cardano/Tracer/Timeseries/Trace.hs b/cardano-tracer/src/Cardano/Tracer/Timeseries/Trace.hs new file mode 100644 index 00000000000..503b98aeab2 --- /dev/null +++ b/cardano-tracer/src/Cardano/Tracer/Timeseries/Trace.hs @@ -0,0 +1,88 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Cardano.Tracer.Timeseries.Trace(TimeseriesTrace(..)) where + +import Cardano.Logging (LogFormatting (..), SeverityS (..)) +import Cardano.Logging.Types (MetaTrace (..), Namespace (..)) +import Cardano.Timeseries.AsText +import Cardano.Timeseries.Domain.Types (MetricIdentifier, Timestamp) +import Cardano.Timeseries.Interface (ExecutionError (..)) +import Cardano.Timeseries.Interp.Value (Value (..)) +import Cardano.Tracer.Timeseries.Types +import Cardano.Tracer.Types (NodeId (..)) + +import Data.Aeson (toJSON) +import Data.Aeson.KeyMap (singleton) +import Data.Aeson.Types ((.=)) +import Data.Text (Text) +import Data.Word (Word64) + +data TimeseriesTrace = TimeseriesTraceCreate (Maybe TimeseriesConfig) + | TimeseriesTraceReconfigure (Maybe TimeseriesConfig) + | TimeseriesTraceInsert NodeId Timestamp [(MetricIdentifier, Double)] + | TimeseriesTraceIssueExecute QueryId Text + | TimeseriesTraceYieldExecute QueryId (Either ExecutionError Value) + -- COMMENT: (@russoul) shall we count the number of pruned entries? 
+ | TimeseriesTracePrune {- retentionMillis -} Word64 deriving (Show) + +instance LogFormatting TimeseriesTrace where + forMachine _ (TimeseriesTraceCreate cfg) = + singleton "cfg" (toJSON cfg) + forMachine _ (TimeseriesTraceReconfigure cfg) = + singleton "cfg" (toJSON cfg) + forMachine _ (TimeseriesTraceInsert nodeId t batch) = mconcat + [ + "node_id" .= nodeId + , + "timestamp" .= t + , + "batch" .= batch + ] + forMachine _ (TimeseriesTraceIssueExecute queryId queryText) = mconcat + [ + "query_id" .= queryId + , + "query_text" .= queryText + ] + forMachine _ (TimeseriesTraceYieldExecute queryId result) = mconcat + [ + "query_id" .= queryId + , + "query_result" .= either asText showT result + ] + forMachine _ (TimeseriesTracePrune retMs) = + singleton "retention_millis" (toJSON retMs) + +instance MetaTrace TimeseriesTrace where + allNamespaces = + [ + Namespace [] ["Timeseries", "Create"] + , Namespace [] ["Timeseries", "Reconfigure"] + , Namespace [] ["Timeseries", "Insert"] + , Namespace [] ["Timeseries", "IssueExecute"] + , Namespace [] ["Timeseries", "YieldExecute"] + , Namespace [] ["Timeseries", "Prune"] + ] + + namespaceFor TimeseriesTraceCreate{} = Namespace [] ["Timeseries", "Create"] + namespaceFor TimeseriesTraceReconfigure{} = Namespace [] ["Timeseries", "Reconfigure"] + namespaceFor TimeseriesTraceInsert{} = Namespace [] ["Timeseries", "Insert"] + namespaceFor TimeseriesTraceIssueExecute{} = Namespace [] ["Timeseries", "IssueExecute"] + namespaceFor TimeseriesTraceYieldExecute{} = Namespace [] ["Timeseries", "YieldExecute"] + namespaceFor TimeseriesTracePrune{} = Namespace [] ["Timeseries", "Prune"] + + severityFor (Namespace [] ["Timeseries", "Create"]) _ = Just Info + severityFor (Namespace [] ["Timeseries", "Reconfigure"]) _ = Just Info + severityFor (Namespace [] ["Timeseries", "Insert"]) _ = Just Debug -- That one clogs up the traces, hence lower severity + severityFor (Namespace [] ["Timeseries", "IssueExecute"]) _ = Just Info + severityFor 
(Namespace [] ["Timeseries", "YieldExecute"]) _ = Just Info + severityFor (Namespace [] ["Timeseries", "Prune"]) _ = Just Info + severityFor _ _ = Nothing + + documentFor (Namespace [] ["Timeseries", "Create"]) = Just "A timeseries handle has been created." + documentFor (Namespace [] ["Timeseries", "Reconfigure"]) = Just "The timeseries handle has been reconfigured." + documentFor (Namespace [] ["Timeseries", "Insert"]) = Just "A batch of metrics from one node has been inserted to the store." + documentFor (Namespace [] ["Timeseries", "IssueExecute"]) = Just "The timeseries query has been issued." + documentFor (Namespace [] ["Timeseries", "YieldExecute"]) = Just "The timeseries query has yielded a result." + documentFor (Namespace [] ["Timeseries", "Prune"]) = Just "The timeseries store has been pruned." + documentFor _ = Nothing diff --git a/cardano-tracer/src/Cardano/Tracer/Timeseries/Types.hs b/cardano-tracer/src/Cardano/Tracer/Timeseries/Types.hs new file mode 100644 index 00000000000..7bc000db055 --- /dev/null +++ b/cardano-tracer/src/Cardano/Tracer/Timeseries/Types.hs @@ -0,0 +1,25 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE StandaloneDeriving #-} + +module Cardano.Tracer.Timeseries.Types(QueryId, TimeseriesConfig(..)) where +import qualified Cardano.Timeseries.Interp.Config as Interp + +import Data.Aeson (ToJSON) +import Data.Word (Word64) +import GHC.Generics + +type QueryId = Word64 + +deriving instance Generic Interp.Config +deriving instance ToJSON Interp.Config + +data TimeseriesConfig = TimeseriesConfig { + -- | How long the store entries are retained for (ms). + retentionMillis :: Word64, + -- | How often the pruner thread shall prune the store (sec), if enabled. + pruningPeriodSec :: Maybe Word64, + -- | Parameters of timeseries query interpretation. 
+ interpCfg :: Interp.Config +} deriving (Show, Generic, ToJSON) + diff --git a/cardano-tracer/src/Cardano/Tracer/Types.hs b/cardano-tracer/src/Cardano/Tracer/Types.hs index 312958649c0..aed5547fe73 100644 --- a/cardano-tracer/src/Cardano/Tracer/Types.hs +++ b/cardano-tracer/src/Cardano/Tracer/Types.hs @@ -33,7 +33,7 @@ import Trace.Forward.Utils.DataPoint (DataPointRequestor) -- | Unique identifier of connected node, based on 'remoteAddress' from -- 'ConnectionId', please see 'ouroboros-network'. -newtype NodeId = NodeId Text +newtype NodeId = NodeId {text :: Text} deriving stock (Eq, Ord, Show) deriving newtype (ToJSON) diff --git a/cardano-tracer/test/Cardano/Tracer/Test/Acceptor.hs b/cardano-tracer/test/Cardano/Tracer/Test/Acceptor.hs index d7b08e1ee58..ae5873a97b7 100644 --- a/cardano-tracer/test/Cardano/Tracer/Test/Acceptor.hs +++ b/cardano-tracer/test/Cardano/Tracer/Test/Acceptor.hs @@ -33,6 +33,7 @@ import qualified Data.Text as T import System.Time.Extra (sleep) import Trace.Forward.Utils.DataPoint +import Cardano.Logging (standardTracer) data AcceptorsMode = Initiator | Responder @@ -53,7 +54,8 @@ launchAcceptorsSimple mode localSock dpName = do currentLogLock <- newLock currentDPLock <- newLock - tr <- mkTracerTracer $ SeverityF $ Just Warning + std <- standardTracer + tr <- mkTracerTracer std $ SeverityF $ Just Warning #if RTVIEW eventsQueues <- initEventsQueues tr Nothing connectedNodesNames dpRequestors currentDPLock @@ -82,6 +84,7 @@ launchAcceptorsSimple mode localSock dpName = do , teRegistry = registry , teStateDir = Nothing , teMetricsHelp = [] + , teTimeseriesHandle = Nothing } tracerEnvRTView :: TracerEnvRTView @@ -111,6 +114,7 @@ launchAcceptorsSimple mode localSock dpName = do , hasEKG = Nothing , hasPrometheus = Nothing , hasRTView = Nothing + , hasTimeseries = Nothing , tlsCertificate = Nothing , logging = NE.fromList [LoggingParams "/tmp/demo-acceptor" FileMode ForHuman] , rotation = Nothing diff --git 
a/cardano-tracer/test/Cardano/Tracer/Test/DataPoint/Tests.hs b/cardano-tracer/test/Cardano/Tracer/Test/DataPoint/Tests.hs index 8abb7a2d72c..6ab547fa798 100644 --- a/cardano-tracer/test/Cardano/Tracer/Test/DataPoint/Tests.hs +++ b/cardano-tracer/test/Cardano/Tracer/Test/DataPoint/Tests.hs @@ -39,7 +39,8 @@ propDataPoint ts@TestSetup{..} rootDir localSock = do stopProtocols <- initProtocolsBrake dpRequestors <- initDataPointRequestors savedDPValues :: TVar DataPointValues <- newTVarIO [] - withAsync (doRunCardanoTracer config (Just $ rootDir <> "/../state") stderrShowTracer stopProtocols dpRequestors) \_ -> do + withAsync (doRunCardanoTracer config (Just $ rootDir <> "/../state") + (TraceBundle stderrShowTracer stderrShowTracer) stopProtocols dpRequestors) \_ -> do sleep 1.0 withAsync (launchForwardersSimple ts Initiator (Net.LocalPipe localSock) 10000) \_ -> do sleep 1.5 @@ -88,6 +89,7 @@ propDataPoint ts@TestSetup{..} rootDir localSock = do , hasEKG = Nothing , hasPrometheus = Nothing , hasRTView = Nothing + , hasTimeseries = Nothing , tlsCertificate = Nothing , logging = NE.fromList [LoggingParams rootDir FileMode ForHuman] , rotation = Nothing diff --git a/cardano-tracer/test/Cardano/Tracer/Test/Logs/Tests.hs b/cardano-tracer/test/Cardano/Tracer/Test/Logs/Tests.hs index 72d597fce35..4b3b70f8f98 100644 --- a/cardano-tracer/test/Cardano/Tracer/Test/Logs/Tests.hs +++ b/cardano-tracer/test/Cardano/Tracer/Test/Logs/Tests.hs @@ -48,7 +48,9 @@ propLogs ts@TestSetup{..} format logRotLimitBytes logRotMaxAgeMinutes rootDir lo lock <- newLock stopProtocols <- initProtocolsBrake dpRequestors <- initDataPointRequestors - withAsync (doRunCardanoTracer (acceptConfig rootDir) (Just $ rootDir <> "/../state") stderrShowTracer stopProtocols dpRequestors) \async1 -> do + withAsync (doRunCardanoTracer (acceptConfig rootDir) + (Just $ rootDir <> "/../state") + (TraceBundle stderrShowTracer stderrShowTracer) stopProtocols dpRequestors) \async1 -> do link async1 sleep 1.0 withAsync 
(launchForwardersSimple ts Initiator (Net.LocalPipe localSock) 10000) \async2 -> do @@ -68,6 +70,7 @@ propLogs ts@TestSetup{..} format logRotLimitBytes logRotMaxAgeMinutes rootDir lo , ekgRequestFreq = Just 1.0 , hasEKG = Nothing , hasPrometheus = Nothing + , hasTimeseries = Nothing , hasRTView = Nothing , logging = LoggingParams root FileMode format :| [] , rotation = Just $ RotationParams @@ -91,7 +94,9 @@ propMultiInit ts@TestSetup{..} format rootDir howToConnect1 howToConnect2 = do lock <- newLock stopProtocols <- initProtocolsBrake dpRequestors <- initDataPointRequestors - withAsync (doRunCardanoTracer initConfig (Just $ rootDir <> "/../state") stderrShowTracer stopProtocols dpRequestors) \async1 -> do + withAsync (doRunCardanoTracer initConfig + (Just $ rootDir <> "/../state") + (TraceBundle stderrShowTracer stderrShowTracer) stopProtocols dpRequestors) \async1 -> do link async1 sleep 1.0 withAsync (launchForwardersSimple ts Responder howToConnect1 10000) \async2 -> do @@ -115,6 +120,7 @@ propMultiInit ts@TestSetup{..} format rootDir howToConnect1 howToConnect2 = do , hasEKG = Nothing , hasPrometheus = Nothing , hasRTView = Nothing + , hasTimeseries = Nothing , tlsCertificate = Nothing , logging = LoggingParams rootDir FileMode format :| [] , rotation = Nothing @@ -133,7 +139,9 @@ propMultiResp ts@TestSetup{..} format rootDir howToConnect = do lock <- newLock stopProtocols <- initProtocolsBrake dpRequestors <- initDataPointRequestors - withAsync (doRunCardanoTracer respConfig (Just $ rootDir <> "/../state") stderrShowTracer stopProtocols dpRequestors) \async1 -> do + withAsync (doRunCardanoTracer respConfig + (Just $ rootDir <> "/../state") + (TraceBundle stderrShowTracer stderrShowTracer) stopProtocols dpRequestors) \async1 -> do link async1 sleep 1.0 -- withAsync (launchForwardersSimple ts Initiator howToConnect 10000) \async2 -> do @@ -159,6 +167,7 @@ propMultiResp ts@TestSetup{..} format rootDir howToConnect = do , hasEKG = Nothing , hasPrometheus = 
Nothing , hasRTView = Nothing + , hasTimeseries = Nothing , tlsCertificate = Nothing , logging = LoggingParams rootDir FileMode format :| [] , rotation = Nothing diff --git a/cardano-tracer/test/Cardano/Tracer/Test/Restart/Tests.hs b/cardano-tracer/test/Cardano/Tracer/Test/Restart/Tests.hs index 1f586c76e43..827489af9a4 100644 --- a/cardano-tracer/test/Cardano/Tracer/Test/Restart/Tests.hs +++ b/cardano-tracer/test/Cardano/Tracer/Test/Restart/Tests.hs @@ -42,7 +42,7 @@ propNetworkForwarder ts rootDir localSock = do dpRequestors <- initDataPointRequestors propNetwork' ts rootDir ( launchForwardersSimple ts Initiator (Net.LocalPipe localSock) 10000 - , doRunCardanoTracer config (Just $ rootDir <> "/../state") stderrShowTracer brake dpRequestors + , doRunCardanoTracer config (Just $ rootDir <> "/../state") (TraceBundle stderrShowTracer stderrShowTracer) brake dpRequestors ) propNetwork' @@ -94,6 +94,7 @@ mkConfig TestSetup{..} rootDir p = TracerConfig , hasEKG = Nothing , hasPrometheus = Nothing , hasRTView = Nothing + , hasTimeseries = Nothing , tlsCertificate = Nothing , logging = NE.fromList [LoggingParams rootDir FileMode ForMachine] , rotation = Nothing diff --git a/nix/nixos/cardano-tracer-service-workbench.nix b/nix/nixos/cardano-tracer-service-workbench.nix index a330c3ff98a..b2f5ec0131e 100644 --- a/nix/nixos/cardano-tracer-service-workbench.nix +++ b/nix/nixos/cardano-tracer-service-workbench.nix @@ -46,6 +46,10 @@ let serviceConfigToJSON = epHost = "127.0.0.1"; epPort = 3200; ## supervisord.portShiftPrometheus } // (cfg.prometheus or {}); + hasTimeseries = { + epHost = "127.0.0.1"; + epPort = 3300; ## supervisord.portShiftPrometheus + } // (cfg.prometheus or {}); # Just an example for metrics compatibility mapping. # An entry means the first entry has the second entry as alias. # The Metrics is then available, both with the original and the mapped name. 
@@ -80,6 +84,7 @@ in pkgs.commonLib.defServiceModule logRoot = opt str null "Log storage root directory."; rotation = opt attrs {} "Log rotation overrides: see cardano-tracer documentation."; RTView = opt attrs {} "RTView config overrides: see cardano-tracer documentation."; + hasTimeseries = opt attrs {} ""; # FIXME: (@russoul) probably shouldn't be here (provisional) ekgPortBase = opt int 3100 "EKG port base."; ekgRequestFreq = opt int 1 "EKG request frequency"; prometheus = opt attrs {} "Prometheus overrides: see cardano-tracer documentation."; diff --git a/nix/workbench/service/tracer.nix b/nix/workbench/service/tracer.nix index cfbf1c1bec3..9a7e5e5c755 100644 --- a/nix/workbench/service/tracer.nix +++ b/nix/workbench/service/tracer.nix @@ -27,6 +27,10 @@ let networkMagic = profile.genesis.network_magic; configFile = "config.json"; metricsHelp = "../../../cardano-tracer/configuration/metrics_help.json"; + hasTimeseries = { # FIXME: (@russoul) for testing only + epHost = "127.0.0.1"; + epPort = 3300; + }; # Decide where the executable comes from: ######################################### } // optionalAttrs (!backend.useCabalRun) {