simplex-chat/tests/SchemaDump.hs

{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}

module SchemaDump where

import ChatClient (withTmpFiles)
import ChatTests.DBUtils
import Control.Concurrent.STM
import Control.DeepSeq
import qualified Control.Exception as E
import Control.Monad (unless, void)
import Data.List (dropWhileEnd, sort)
import qualified Data.Map.Strict as M
import Data.Maybe (fromJust, isJust)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Database.SQLite.Simple (Query (..))
import Simplex.Chat.Store (createChatStore)
import qualified Simplex.Chat.Store as Store
import Simplex.Messaging.Agent.Env.SQLite (createAgentStore)
import Simplex.Messaging.Agent.Store.Common (withConnection)
import Simplex.Messaging.Agent.Store.DB (TrackQueries (..))
import qualified Simplex.Messaging.Agent.Store.DB as DB
import Simplex.Messaging.Agent.Store.Interface
import qualified Simplex.Messaging.Agent.Store.SQLite.Migrations as Migrations
import Simplex.Messaging.Agent.Store.Shared (Migration (..), MigrationConfirmation (..), MigrationsToRun (..), toDownMigration)
import Simplex.Messaging.Util (ifM, tshow, whenM)
import System.Directory (doesFileExist, removeFile)
import System.Process (readCreateProcess, shell)
import Test.Hspec
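
-- Paths of the temporary test databases and dump files, and of the checked-in
-- artifacts (schema dump, lint report, query plans) that the tests compare against.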
testDB :: FilePath
testDB = "tests/tmp/test_chat.db"

testAgentDB :: FilePath
testAgentDB = "tests/tmp/test_agent.db"

appSchema :: FilePath
appSchema = "src/Simplex/Chat/Store/SQLite/Migrations/chat_schema.sql"

appLint :: FilePath
appLint = "src/Simplex/Chat/Store/SQLite/Migrations/chat_lint.sql"

appChatQueryPlans :: FilePath
appChatQueryPlans = "src/Simplex/Chat/Store/SQLite/Migrations/chat_query_plans.txt"

appAgentQueryPlans :: FilePath
appAgentQueryPlans = "src/Simplex/Chat/Store/SQLite/Migrations/agent_query_plans.txt"

testSchema :: FilePath
testSchema = "tests/tmp/test_agent_schema.sql"
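
-- Checks that the committed schema artifacts stay in sync with the migrations.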
schemaDumpTest :: Spec
schemaDumpTest = do
  it "verify and overwrite schema dump" testVerifySchemaDump
  it "verify .lint fkey-indexes" testVerifyLintFKeyIndexes
  it "verify schema down migrations" testSchemaMigrations
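
-- Creates a chat database from scratch, dumps its schema with the sqlite3 CLI,
-- and compares it with (and overwrites) the committed chat_schema.sql.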
testVerifySchemaDump :: IO ()
testVerifySchemaDump = withTmpFiles $ do
  savedSchema <- ifM (doesFileExist appSchema) (readFile appSchema) (pure "")
  savedSchema `deepseq` pure ()
  void $ createChatStore (DBOpts testDB "" False True TQOff) MCError
  getSchema testDB appSchema `shouldReturn` savedSchema
  removeFile testDB
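
-- Runs sqlite3 ".lint fkey-indexes" on a freshly created chat database and
-- compares the report with the committed chat_lint.sql.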
testVerifyLintFKeyIndexes :: IO ()
testVerifyLintFKeyIndexes = withTmpFiles $ do
  savedLint <- ifM (doesFileExist appLint) (readFile appLint) (pure "")
  savedLint `deepseq` pure ()
  void $ createChatStore (DBOpts testDB "" False True TQOff) MCError
  getLintFKeyIndexes testDB "tests/tmp/chat_lint.sql" `shouldReturn` savedLint
  removeFile testDB
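
-- For each migration that has a down migration: apply it and check that the schema
-- changes, roll it back and check that the previous schema is restored, then re-apply it.
-- Migrations listed in the skip lists below are excluded from the comparisons.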
testSchemaMigrations :: IO ()
testSchemaMigrations = withTmpFiles $ do
  let noDownMigrations = dropWhileEnd (\Migration {down} -> isJust down) Store.migrations
  Right st <- createDBStore (DBOpts testDB "" False True TQOff) noDownMigrations MCError
  mapM_ (testDownMigration st) $ drop (length noDownMigrations) Store.migrations
  closeDBStore st
  removeFile testDB
  whenM (doesFileExist testSchema) $ removeFile testSchema
  where
    testDownMigration st m = do
      putStrLn $ "down migration " <> name m
      let downMigr = fromJust $ toDownMigration m
      schema <- getSchema testDB testSchema
      Migrations.run st True $ MTRUp [m]
      schema' <- getSchema testDB testSchema
      unless (name m `elem` skipComparisonForUpMigrations) $
        schema' `shouldNotBe` schema
      Migrations.run st True $ MTRDown [downMigr]
      unless (name m `elem` skipComparisonForDownMigrations) $ do
        schema'' <- getSchema testDB testSchema
        schema'' `shouldBe` schema
      Migrations.run st True $ MTRUp [m]
      schema''' <- getSchema testDB testSchema
      schema''' `shouldBe` schema'
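
-- Up migrations that do not change the schema dump, so the "schema changed" check is skipped.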
skipComparisonForUpMigrations :: [String]
skipComparisonForUpMigrations =
  [ -- schema doesn't change
    "20250129_delete_unused_contacts"
  ]
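
-- Down migrations after which sqlite3 re-orders definitions in the schema dump
-- (see the inline notes), so the exact schema comparison is skipped.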
skipComparisonForDownMigrations :: [String]
skipComparisonForDownMigrations =
  [ -- on down migration msg_delivery_events table moves down to the end of the file
    "20230504_recreate_msg_delivery_events_cleanup_messages",
    -- on down migration idx_chat_items_timed_delete_at index moves down to the end of the file
    "20230529_indexes",
    -- table and index definitions move down the file, so fields are re-created as not unique
    "20230914_member_probes",
    -- on down migration idx_connections_via_contact_uri_hash index moves down to the end of the file
    "20231019_indexes",
    -- table and indexes move down to the end of the file
    "20231215_recreate_msg_deliveries",
    -- on down migration idx_msg_deliveries_agent_ack_cmd_id index moves down to the end of the file
    "20240313_drop_agent_ack_cmd_id",
    -- sequence table moves down to the end of the file
    "20241023_chat_item_autoincrement_id",
    -- indexes move down to the end of the file
    "20241125_indexes",
    -- indexes move down to the end of the file
    "20250130_indexes",
    -- index moves down to the end of the file
    "20250227_member_acceptance"
  ]
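
-- Dumps the schema of the database at dbPath to schemaPath via the sqlite3 CLI and
-- returns it; deepseq forces the lazily read file before it is reused or rewritten.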
getSchema :: FilePath -> FilePath -> IO String
getSchema dbPath schemaPath = do
  void $ readCreateProcess (shell $ "sqlite3 " <> dbPath <> " '.schema --indent' > " <> schemaPath) ""
  sch <- readFile schemaPath
  sch `deepseq` pure sch
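
-- Runs sqlite3 ".lint fkey-indexes" on the database at dbPath, writes the report
-- to lintPath and returns it fully forced.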
getLintFKeyIndexes :: FilePath -> FilePath -> IO String
getLintFKeyIndexes dbPath lintPath = do
  void $ readCreateProcess (shell $ "sqlite3 " <> dbPath <> " '.lint fkey-indexes' > " <> lintPath) ""
  lint <- readFile lintPath
  lint `deepseq` pure lint
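
-- Collects EXPLAIN QUERY PLAN output for every query recorded in chatQueryStats /
-- agentQueryStats during the test run and compares it with (and overwrites) the
-- committed query plan files.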
saveQueryPlans :: SpecWith TestParams
saveQueryPlans = it "verify and overwrite query plans" $ \TestParams {chatQueryStats, agentQueryStats} -> do
  (chatSavedPlans, chatSavedPlans') <-
    updatePlans
      appChatQueryPlans
      chatQueryStats
      (createChatStore (DBOpts testDB "" False True TQOff) MCError)
      (\db -> do
          DB.execute_ db "CREATE TABLE IF NOT EXISTS temp_conn_ids (conn_id BLOB)"
          DB.execute_ db "CREATE TABLE IF NOT EXISTS temp_delete_members (contact_profile_id INTEGER, member_profile_id INTEGER, local_display_name TEXT)"
      )
  (agentSavedPlans, agentSavedPlans') <-
    updatePlans
      appAgentQueryPlans
      agentQueryStats
      (createAgentStore (DBOpts testAgentDB "" False True TQOff) MCError)
      (const $ pure ())
  chatSavedPlans' == chatSavedPlans `shouldBe` True
  agentSavedPlans' == agentSavedPlans `shouldBe` True
  removeFile testDB
  removeFile testAgentDB
  where
    updatePlans plansFile statsSel createStore prepareStore = do
      savedPlans <- ifM (doesFileExist plansFile) (T.readFile plansFile) (pure "")
      savedPlans `deepseq` pure ()
      queries <- sort . M.keys <$> readTVarIO statsSel
      Right st <- createStore
      plans' <- withConnection st $ \db -> do
        void $ prepareStore db
        mapM (getQueryPlan db) queries
      let savedPlans' = T.unlines plans'
      T.writeFile plansFile savedPlans'
      pure (savedPlans, savedPlans')
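    -- EXPLAIN QUERY PLAN output for a single query; errors are rendered into the
    -- dump instead of failing the test.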
    getQueryPlan :: DB.Connection -> Query -> IO Text
    getQueryPlan db q =
      (("Query: " <> fromQuery q) <>) . result <$> E.try (DB.query_ db $ "explain query plan " <> q)
    result = \case
      Right r -> "\nPlan:\n" <> T.unlines (map planDetail r)
      Left (e :: E.SomeException) -> "\nError: " <> tshow e <> "\n"
    planDetail :: (Int, Int, Int, Text) -> Text
    planDetail (_, _, _, detail) = detail
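
-- A minimal sketch of how these specs could be wired into an Hspec driver; this is
-- illustrative only: the real test entry point lives outside this module, and
-- saveQueryPlans additionally needs the TestParams fixture provided by the test setup.
--
-- main :: IO ()
-- main = hspec $ describe "schema dump" schemaDumpTest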