From 58b8b687ad578f1ee305ae0e549a6198928582e3 Mon Sep 17 00:00:00 2001 From: Patrick Date: Sat, 2 Nov 2024 09:44:33 +0800 Subject: [PATCH 01/87] Refactor session loading to manage pending files so we can batch load them to improve performance fix #4381 --- .../session-loader/Development/IDE/Session.hs | 55 +++++++++++-------- 1 file changed, 33 insertions(+), 22 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index a1d778ab0e..dab01c982f 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -424,7 +424,7 @@ getHieDbLoc dir = do loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions -> FilePath -> TQueue (IO ()) -> IO (Action IdeGhcSession) loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] - cradle_files <- newIORef [] + cradle_files <- newIORef (Set.fromList []) -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file hscEnvs <- newVar Map.empty :: IO (Var HieMap) -- Mapping from a Filepath to HscEnv @@ -434,6 +434,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- they are inconsistent. So, everywhere you modify 'fileToFlags', -- you have to modify 'filesMap' as well. filesMap <- newVar HM.empty :: IO (Var FilesMap) + pendingFilesTQueue <- newTQueueIO + -- Pending files waiting to be loaded -- Version of the mappings above version <- newVar 0 biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) @@ -550,7 +552,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) - -> IO (IdeResult HscEnvEq,[FilePath]) + -> IO ((IdeResult HscEnvEq,[FilePath]), HashSet FilePath) session args@(hieYaml, _cfp, _opts, _libDir) = do (new_deps, old_deps) <- packageSetup args @@ -562,13 +564,15 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let new_cache = newComponentCache recorder optExtensions _cfp hscEnv all_target_details <- new_cache old_deps new_deps + let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') + all_targets' = concat all_target_details + newLoaded = HM.keys flags_map' this_dep_info <- getDependencyInfo $ maybeToList hieYaml let (all_targets, this_flags_map, this_options) = case HM.lookup _cfp flags_map' of Just this -> (all_targets', flags_map', this) Nothing -> (this_target_details : all_targets', HM.insert _cfp this_flags flags_map', this_flags) - where all_targets' = concat all_target_details - flags_map' = HM.fromList (concatMap toFlagsMap all_targets') + where this_target_details = TargetDetails (TargetFile _cfp) this_error_env this_dep_info [_cfp] this_flags = (this_error_env, this_dep_info) this_error_env = ([this_error], Nothing) @@ -580,27 +584,27 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do void $ modifyVar' fileToFlags $ Map.insert hieYaml this_flags_map void $ modifyVar' filesMap $ flip HM.union (HM.fromList (map ((,hieYaml) . fst) $ concatMap toFlagsMap all_targets)) + -- Typecheck all files in the project on startup + checkProject <- getCheckProject -- The VFS doesn't change on cradle edits, re-use the old one. 
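PATCH 01 above batches cradle loads: files queue up in pendingFilesTQueue, the whole queue is drained when a cradle is consulted, and `session` now also reports which of those files actually received a target. A minimal, self-contained sketch of that protocol (hypothetical names `drainPending` and `requeueMissing`, not the real ghcide functions; the hie-bios call itself is abstracted away):

import Control.Concurrent.STM
import qualified Data.HashSet as Set

-- Drain every queued request in one atomic step so a single cradle
-- invocation can try to load the whole batch at once.
drainPending :: TQueue FilePath -> IO (Set.HashSet FilePath)
drainPending queue = Set.fromList <$> atomically (flushTQueue queue)

-- Only the files the cradle really produced targets for stay loaded;
-- everything else is put back on the queue for a later attempt.
requeueMissing :: TQueue FilePath -> Set.HashSet FilePath -> Set.HashSet FilePath -> IO ()
requeueMissing queue pending loaded =
  atomically $ mapM_ (writeTQueue queue) (Set.toList (pending `Set.difference` loaded))

Keeping the drain and the requeue as separate atomic steps mirrors the patch: the expensive cradle call happens outside STM, between the two.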
-- Invalidate all the existing GhcSession build nodes by restarting the Shake session - keys2 <- invalidateShakeCache restartShakeSession VFSUnmodified "new component" [] $ do + keys2 <- invalidateShakeCache keys1 <- extendKnownTargets all_targets + unless (null new_deps || not checkProject) $ do + cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets) + void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do + mmt <- uses GetModificationTime cfps' + let cs_exist = catMaybes (zipWith (<$) cfps' mmt) + modIfaces <- uses GetModIface cs_exist + -- update exports map + shakeExtras <- getShakeExtras + let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces + liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] - -- Typecheck all files in the project on startup - checkProject <- getCheckProject - unless (null new_deps || not checkProject) $ do - cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets) - void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do - mmt <- uses GetModificationTime cfps' - let cs_exist = catMaybes (zipWith (<$) cfps' mmt) - modIfaces <- uses GetModIface cs_exist - -- update exports map - shakeExtras <- getShakeExtras - let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces - liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) - - return $ second Map.keys this_options + + return $ (second Map.keys this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) consultCradle hieYaml cfp = do @@ -615,11 +619,13 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- Display a user friendly progress message here: They probably don't know what a cradle is let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" + + pendingFiles <- Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog old_files <- readIORef cradle_files - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp old_files + res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ pendingFiles <> old_files) addTag "result" (show res) return res @@ -633,8 +639,12 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - atomicModifyIORef' cradle_files (\xs -> (cfp:xs,())) - session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) + (results, newLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) + -- put back to pending que if not listed in the results + let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` newLoaded + atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) + atomicModifyIORef' cradle_files (\xs -> (pendingFiles `Set.intersection` newLoaded <> xs,())) + return results | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do @@ -708,6 +718,7 @@ 
loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do return (([renderPackageSetupException file e], Nothing), maybe [] pure hieYaml) returnWithVersion $ \file -> do + atomically $ writeTQueue pendingFilesTQueue $ toAbsolutePath file -- see Note [Serializing runs in separate thread] awaitRunInThread que $ getOptions file From ea002d7ef8f2c8be8663e2689bced7e67b8884ac Mon Sep 17 00:00:00 2001 From: Patrick Date: Sun, 3 Nov 2024 15:28:16 +0800 Subject: [PATCH 02/87] distribute errors to all pending files are being loading --- .../session-loader/Development/IDE/Session.hs | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index dab01c982f..8683b5ada1 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -425,6 +425,7 @@ loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions - loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] cradle_files <- newIORef (Set.fromList []) +-- error_loading_files <- newIORef (Set.fromList []) -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file hscEnvs <- newVar Map.empty :: IO (Var HieMap) -- Mapping from a Filepath to HscEnv @@ -606,6 +607,15 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do return $ (second Map.keys this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) + let makeError hieYaml cradle err cfp = do + dep_info <- getDependencyInfo (maybeToList hieYaml) + let ncfp = toNormalizedFilePath' cfp + let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) + void $ modifyVar' fileToFlags $ + Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) + void $ modifyVar' filesMap $ HM.insert ncfp hieYaml + return (fst res) + let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) consultCradle hieYaml cfp = do let lfpLog = makeRelative rootDir cfp @@ -648,13 +658,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do - dep_info <- getDependencyInfo (maybeToList hieYaml) - let ncfp = toNormalizedFilePath' cfp - let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - void $ modifyVar' fileToFlags $ - Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) - void $ modifyVar' filesMap $ HM.insert ncfp hieYaml - return (res, maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) + errors <- mapM (makeError hieYaml cradle err) $ Set.toList pendingFiles + return ((concat errors, Nothing), maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) let -- | We allow users to specify a loading strategy. 
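PATCH 02 fans a single cradle failure out to every file that was waiting on that cradle. A reduced model of that bookkeeping (diagnostic type simplified to String, `distributeErrors` is a made-up name, and the real code also records dependency info alongside the diagnostics):

import qualified Data.Map.Strict as Map

type Diag = String

-- Each cradle error is rendered once per pending file and stored under the
-- hie.yaml that produced it, so every queued file gets diagnostics instead
-- of only the file that triggered the load.
distributeErrors
  :: Maybe FilePath                       -- hie.yaml location
  -> [FilePath]                           -- files pending on this cradle
  -> [FilePath -> Diag]                   -- renderers, one per cradle error
  -> Map.Map (Maybe FilePath) (Map.Map FilePath [Diag])
  -> Map.Map (Maybe FilePath) (Map.Map FilePath [Diag])
distributeErrors hieYaml files renderers =
  Map.insertWith Map.union hieYaml perFile
  where
    perFile = Map.fromList [ (fp, [ render fp | render <- renderers ]) | fp <- files ]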
From c78b197000c093e76f5277b7814b81ec32a85564 Mon Sep 17 00:00:00 2001 From: Patrick Date: Sun, 3 Nov 2024 16:15:57 +0800 Subject: [PATCH 03/87] better filter loading files --- cabal.project | 6 ++++++ ghcide/session-loader/Development/IDE/Session.hs | 5 ++++- .../session-loader/Development/IDE/Session/Diagnostics.hs | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/cabal.project b/cabal.project index 08d743c24e..3cae5e5181 100644 --- a/cabal.project +++ b/cabal.project @@ -6,6 +6,7 @@ packages: ./ghcide ./hls-plugin-api ./hls-test-utils +-- ../hiebios index-state: 2024-10-21T00:00:00Z @@ -46,3 +47,8 @@ constraints: if impl(ghc >= 9.9) -- https://github.com/haskell/haskell-language-server/issues/4324 benchmarks: False + +source-repository-package + type: git + location: https://github.com/soulomoon/hie-bios.git + tag: 7dd19b547ba30b760053b5f796c439cba6ac6a07 diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 8683b5ada1..a4e8678d43 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -658,7 +658,10 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do - errors <- mapM (makeError hieYaml cradle err) $ Set.toList pendingFiles + let failedLoadingFiles = nub $ cfp:concatMap cradleErrorLoadingFiles err + let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` Set.fromList failedLoadingFiles + atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) + errors <- mapM (makeError hieYaml cradle err) $ failedLoadingFiles return ((concat errors, Nothing), maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) let diff --git a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs index a8e35e5965..ac18ff2025 100644 --- a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs +++ b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs @@ -27,7 +27,7 @@ data CradleErrorDetails = Depicts the cradle error in a user-friendly way. 
-} renderCradleError :: CradleError -> Cradle a -> NormalizedFilePath -> FileDiagnostic -renderCradleError (CradleError deps _ec ms) cradle nfp +renderCradleError (CradleError deps _ec ms _fps) cradle nfp | HieBios.isCabalCradle cradle = let (fp, showDiag, diag) = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp $ T.unlines $ map T.pack userFriendlyMessage in (fp, showDiag, diag{_data_ = Just $ Aeson.toJSON CradleErrorDetails{cabalProjectFiles=absDeps}}) From b87937580e8239024b58d3013cb12f38ec50d0d8 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 21:20:25 +0800 Subject: [PATCH 04/87] fallback to non-batch load --- cabal.project | 6 -- .../session-loader/Development/IDE/Session.hs | 64 ++++++++++++------- .../Development/IDE/Session/Diagnostics.hs | 2 +- 3 files changed, 43 insertions(+), 29 deletions(-) diff --git a/cabal.project b/cabal.project index 3cae5e5181..08d743c24e 100644 --- a/cabal.project +++ b/cabal.project @@ -6,7 +6,6 @@ packages: ./ghcide ./hls-plugin-api ./hls-test-utils --- ../hiebios index-state: 2024-10-21T00:00:00Z @@ -47,8 +46,3 @@ constraints: if impl(ghc >= 9.9) -- https://github.com/haskell/haskell-language-server/issues/4324 benchmarks: False - -source-repository-package - type: git - location: https://github.com/soulomoon/hie-bios.git - tag: 7dd19b547ba30b760053b5f796c439cba6ac6a07 diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index a4e8678d43..1dc4135923 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -146,10 +146,13 @@ data Log | LogNewComponentCache !(([FileDiagnostic], Maybe HscEnvEq), DependencyInfo) | LogHieBios HieBios.Log | LogSessionLoadingChanged + | LogSessionNewLoadedFiles ![FilePath] deriving instance Show Log instance Pretty Log where pretty = \case + LogSessionNewLoadedFiles files -> + "New loaded files:" <+> pretty files LogNoneCradleFound path -> "None cradle found for" <+> pretty path <+> ", ignoring the file" LogSettingInitialDynFlags -> @@ -425,7 +428,7 @@ loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions - loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] cradle_files <- newIORef (Set.fromList []) --- error_loading_files <- newIORef (Set.fromList []) + error_loading_files <- newIORef (Set.fromList []) -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file hscEnvs <- newVar Map.empty :: IO (Var HieMap) -- Mapping from a Filepath to HscEnv @@ -603,19 +606,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] - - return $ (second Map.keys this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) - let makeError hieYaml cradle err cfp = do - dep_info <- getDependencyInfo (maybeToList hieYaml) - let ncfp = toNormalizedFilePath' cfp - let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - void $ modifyVar' fileToFlags $ - Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) - void $ modifyVar' filesMap $ HM.insert ncfp hieYaml - return (fst res) - let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) consultCradle hieYaml cfp = do let lfpLog = makeRelative rootDir cfp @@ 
-630,12 +622,19 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" - pendingFiles <- Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) + pendingFiles' <- Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) + -- remove the file from error loading files + errorFiles <- readIORef error_loading_files + -- remove error files from pending files since error loading need to load one by one + let pendingFiles = pendingFiles' `Set.difference` errorFiles + -- if the file is in error loading files, we fall back to single loading mode + let toLoads = if cfp `Set.member` errorFiles then Set.empty else pendingFiles + eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog old_files <- readIORef cradle_files - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ pendingFiles <> old_files) + res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ Set.delete cfp $ toLoads <> old_files) addTag "result" (show res) return res @@ -649,20 +648,37 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - (results, newLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) + (results, allNewLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) -- put back to pending que if not listed in the results - let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` newLoaded + -- delete cfp even if ew report No cradle target found for cfp + let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` allNewLoaded + let newLoadedT = pendingFiles `Set.intersection` allNewLoaded atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) - atomicModifyIORef' cradle_files (\xs -> (pendingFiles `Set.intersection` newLoaded <> xs,())) + -- log new loaded files + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoadedT + atomicModifyIORef' cradle_files (\xs -> (newLoadedT <> xs,())) + atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` newLoadedT, ())) return results | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do - let failedLoadingFiles = nub $ cfp:concatMap cradleErrorLoadingFiles err - let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` Set.fromList failedLoadingFiles - atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) - errors <- mapM (makeError hieYaml cradle err) $ failedLoadingFiles - return ((concat errors, Nothing), maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) + if (length toLoads > 1) + then do + succLoaded_files <- readIORef cradle_files + -- mark as less loaded files as failedLoadingFiles possible + let failedLoadingFiles = (Set.insert cfp toLoads) `Set.difference` succLoaded_files + atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) + -- retry without other files + atomically $ forM_ pendingFiles (writeTQueue pendingFilesTQueue) + consultCradle hieYaml cfp + else do + dep_info <- getDependencyInfo (maybeToList hieYaml) + let ncfp = 
toNormalizedFilePath' cfp + let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) + void $ modifyVar' fileToFlags $ + Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) + void $ modifyVar' filesMap $ HM.insert ncfp hieYaml + return (res, maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) let -- | We allow users to specify a loading strategy. @@ -703,6 +719,10 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do deps_ok <- checkDependencyInfo old_di if not deps_ok then do + -- todo invoke the action to recompile the file + -- if deps are old, we can try to load the error files again + atomicModifyIORef' error_loading_files (\xs -> (Set.delete file xs,())) + atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) -- If the dependencies are out of date then clear both caches and start -- again. modifyVar_ fileToFlags (const (return Map.empty)) diff --git a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs index ac18ff2025..a8e35e5965 100644 --- a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs +++ b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs @@ -27,7 +27,7 @@ data CradleErrorDetails = Depicts the cradle error in a user-friendly way. -} renderCradleError :: CradleError -> Cradle a -> NormalizedFilePath -> FileDiagnostic -renderCradleError (CradleError deps _ec ms _fps) cradle nfp +renderCradleError (CradleError deps _ec ms) cradle nfp | HieBios.isCabalCradle cradle = let (fp, showDiag, diag) = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp $ T.unlines $ map T.pack userFriendlyMessage in (fp, showDiag, diag{_data_ = Just $ Aeson.toJSON CradleErrorDetails{cabalProjectFiles=absDeps}}) From 8953aec8f4eac9f8c87b6ddf955eeb383ebcf959 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 21:23:51 +0800 Subject: [PATCH 05/87] typo --- ghcide/session-loader/Development/IDE/Session.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 1dc4135923..9eac2ce279 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -665,7 +665,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do if (length toLoads > 1) then do succLoaded_files <- readIORef cradle_files - -- mark as less loaded files as failedLoadingFiles possible + -- mark as less loaded files as failedLoadingFiles as possible let failedLoadingFiles = (Set.insert cfp toLoads) `Set.difference` succLoaded_files atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files From 4bdc2c87c8aead0b14a988e9c0b19b8d2d735558 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 21:24:59 +0800 Subject: [PATCH 06/87] update inline doc --- ghcide/session-loader/Development/IDE/Session.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 9eac2ce279..7df8fc0240 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -623,7 +623,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do <> " (for " <> T.pack lfpLog <> ")" pendingFiles' <- Set.fromList <$> (atomically $ flushTQueue 
pendingFilesTQueue) - -- remove the file from error loading files errorFiles <- readIORef error_loading_files -- remove error files from pending files since error loading need to load one by one let pendingFiles = pendingFiles' `Set.difference` errorFiles @@ -656,6 +655,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoadedT + -- remove the file from error loading files atomicModifyIORef' cradle_files (\xs -> (newLoadedT <> xs,())) atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` newLoadedT, ())) return results From c4bb53a267c5173394ce330f33e84d6da497541a Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 21:26:22 +0800 Subject: [PATCH 07/87] update inline doc --- ghcide/session-loader/Development/IDE/Session.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 7df8fc0240..70a882b337 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -438,8 +438,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- they are inconsistent. So, everywhere you modify 'fileToFlags', -- you have to modify 'filesMap' as well. filesMap <- newVar HM.empty :: IO (Var FilesMap) - pendingFilesTQueue <- newTQueueIO -- Pending files waiting to be loaded + pendingFilesTQueue <- newTQueueIO -- Version of the mappings above version <- newVar 0 biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) From 112bc951555bf0c1e542ad05586457d351e079af Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 21:36:03 +0800 Subject: [PATCH 08/87] add LogSessionReloadOnError to log errors during file reloads; cleanup error loading and cradle files --- ghcide/session-loader/Development/IDE/Session.hs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 70a882b337..793c6b3669 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -147,10 +147,13 @@ data Log | LogHieBios HieBios.Log | LogSessionLoadingChanged | LogSessionNewLoadedFiles ![FilePath] + | LogSessionReloadOnError FilePath ![FilePath] deriving instance Show Log instance Pretty Log where pretty = \case + LogSessionReloadOnError path files -> + "Reloading file due to error in" <+> pretty path <+> "with files:" <+> pretty files LogSessionNewLoadedFiles files -> "New loaded files:" <+> pretty files LogNoneCradleFound path -> @@ -649,14 +652,14 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do | compileTime == runTime -> do (results, allNewLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) -- put back to pending que if not listed in the results - -- delete cfp even if ew report No cradle target found for cfp + -- delete cfp even if we report No cradle target found for the cfp let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` allNewLoaded let newLoadedT = pendingFiles `Set.intersection` allNewLoaded atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ 
Set.toList newLoadedT - -- remove the file from error loading files atomicModifyIORef' cradle_files (\xs -> (newLoadedT <> xs,())) + -- remove the file from error loading files atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` newLoadedT, ())) return results | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) @@ -711,6 +714,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do modifyVar_ filesMap (const (return HM.empty)) -- Don't even keep the name cache, we start from scratch here! modifyVar_ hscEnvs (const (return Map.empty)) + -- cleanup error loading files and cradle files + atomicModifyIORef' error_loading_files (\_ -> (Set.empty,())) + atomicModifyIORef' cradle_files (\_ -> (Set.empty,())) v <- Map.findWithDefault HM.empty hieYaml <$> readVar fileToFlags let cfp = toAbsolutePath file From 6e04d289fe57145153128b44bf1aacb42992456b Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 23:35:57 +0800 Subject: [PATCH 09/87] refactor loadSessionWithOptions to improve error handling and clarify variable names --- ghcide/session-loader/Development/IDE/Session.hs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 793c6b3669..bcf29f85b4 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -630,13 +630,13 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- remove error files from pending files since error loading need to load one by one let pendingFiles = pendingFiles' `Set.difference` errorFiles -- if the file is in error loading files, we fall back to single loading mode - let toLoads = if cfp `Set.member` errorFiles then Set.empty else pendingFiles + let extraToLoads = if cfp `Set.member` errorFiles then Set.empty else pendingFiles eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog old_files <- readIORef cradle_files - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ Set.delete cfp $ toLoads <> old_files) + res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ Set.delete cfp $ extraToLoads <> old_files) addTag "result" (show res) return res @@ -660,16 +660,16 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoadedT atomicModifyIORef' cradle_files (\xs -> (newLoadedT <> xs,())) -- remove the file from error loading files - atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` newLoadedT, ())) + atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) return results | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do - if (length toLoads > 1) + if (not $ null extraToLoads) then do succLoaded_files <- readIORef cradle_files -- mark as less loaded files as failedLoadingFiles as possible - let failedLoadingFiles = (Set.insert cfp toLoads) `Set.difference` succLoaded_files + let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` succLoaded_files atomicModifyIORef' error_loading_files (\xs -> 
(failedLoadingFiles <> xs,())) -- retry without other files atomically $ forM_ pendingFiles (writeTQueue pendingFilesTQueue) @@ -681,6 +681,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do void $ modifyVar' fileToFlags $ Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) void $ modifyVar' filesMap $ HM.insert ncfp hieYaml + atomicModifyIORef' error_loading_files (\xs -> (Set.insert cfp xs,())) return (res, maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) let From 67aebc42b01d46c9f699cd4a4f045c548c0960c2 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 5 Nov 2024 02:03:00 +0800 Subject: [PATCH 10/87] refactor loadSessionWithOptions to improve pending file handling and error management --- .../session-loader/Development/IDE/Session.hs | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index bcf29f85b4..cb2571e046 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -625,17 +625,18 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" - pendingFiles' <- Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) + pendingFiles <- Set.insert cfp . Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) errorFiles <- readIORef error_loading_files - -- remove error files from pending files since error loading need to load one by one - let pendingFiles = pendingFiles' `Set.difference` errorFiles + old_files <- readIORef cradle_files -- if the file is in error loading files, we fall back to single loading mode - let extraToLoads = if cfp `Set.member` errorFiles then Set.empty else pendingFiles + let extraToLoads = if cfp `Set.member` errorFiles + then Set.empty + -- remove error files from pending files since error loading need to load one by one + else Set.delete cfp $ pendingFiles `Set.difference` errorFiles eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog - old_files <- readIORef cradle_files res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ Set.delete cfp $ extraToLoads <> old_files) addTag "result" (show res) return res @@ -654,22 +655,21 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- put back to pending que if not listed in the results -- delete cfp even if we report No cradle target found for the cfp let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` allNewLoaded - let newLoadedT = pendingFiles `Set.intersection` allNewLoaded + let newLoaded = pendingFiles `Set.intersection` allNewLoaded atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) -- log new loaded files - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoadedT - atomicModifyIORef' cradle_files (\xs -> (newLoadedT <> xs,())) - -- remove the file from error loading files + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + -- remove all new loaded file from error loading files atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) + atomicModifyIORef' cradle_files (\xs -> (newLoaded <> xs,())) return results | otherwise 
-> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do if (not $ null extraToLoads) then do - succLoaded_files <- readIORef cradle_files -- mark as less loaded files as failedLoadingFiles as possible - let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` succLoaded_files + let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` old_files atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files atomically $ forM_ pendingFiles (writeTQueue pendingFilesTQueue) From 98ae44677d0f4295ed2e461b838f5f938e1f4a50 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 5 Nov 2024 02:25:56 +0800 Subject: [PATCH 11/87] add doc about limitation --- ghcide/session-loader/Development/IDE/Session.hs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index cb2571e046..127af00f2d 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -669,6 +669,10 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do if (not $ null extraToLoads) then do -- mark as less loaded files as failedLoadingFiles as possible + -- limitation is that when we are loading files, and the dependencies of old_files + -- are changed, and old_files are not valid anymore. + -- but they will still be in the old_files, and will not move to error_loading_files. + -- And make other files failed to load in batch mode. let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` old_files atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files @@ -726,7 +730,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do deps_ok <- checkDependencyInfo old_di if not deps_ok then do - -- todo invoke the action to recompile the file -- if deps are old, we can try to load the error files again atomicModifyIORef' error_loading_files (\xs -> (Set.delete file xs,())) atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) From f3eb580d1217f8fde81d2dc334df22482b6588a3 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 5 Nov 2024 16:51:10 +0800 Subject: [PATCH 12/87] absolute file at the beginning --- ghcide/session-loader/Development/IDE/Session.hs | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 127af00f2d..57c9a73024 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -677,6 +677,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files atomically $ forM_ pendingFiles (writeTQueue pendingFilesTQueue) + logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList pendingFiles) consultCradle hieYaml cfp else do dep_info <- getDependencyInfo (maybeToList hieYaml) @@ -724,8 +725,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do atomicModifyIORef' cradle_files (\_ -> (Set.empty,())) v <- Map.findWithDefault HM.empty hieYaml <$> readVar fileToFlags - let cfp = toAbsolutePath file - case HM.lookup (toNormalizedFilePath' cfp) v of + case 
HM.lookup (toNormalizedFilePath' file) v of Just (opts, old_di) -> do deps_ok <- checkDependencyInfo old_di if not deps_ok @@ -739,9 +739,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do modifyVar_ filesMap (const (return HM.empty)) -- Keep the same name cache modifyVar_ hscEnvs (return . Map.adjust (const []) hieYaml ) - consultCradle hieYaml cfp + consultCradle hieYaml file else return (opts, Map.keys old_di) - Nothing -> consultCradle hieYaml cfp + Nothing -> consultCradle hieYaml file -- The main function which gets options for a file. We only want one of these running -- at a time. Therefore the IORef contains the currently running cradle, if we try @@ -749,16 +749,17 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- before attempting to do so. let getOptions :: FilePath -> IO (IdeResult HscEnvEq, [FilePath]) getOptions file = do - let ncfp = toNormalizedFilePath' (toAbsolutePath file) + let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- HM.lookup ncfp <$> readVar filesMap hieYaml <- cradleLoc file sessionOpts (join cachedHieYamlLocation <|> hieYaml, file) `Safe.catch` \e -> return (([renderPackageSetupException file e], Nothing), maybe [] pure hieYaml) returnWithVersion $ \file -> do - atomically $ writeTQueue pendingFilesTQueue $ toAbsolutePath file + let absFile = toAbsolutePath file + atomically $ writeTQueue pendingFilesTQueue absFile -- see Note [Serializing runs in separate thread] - awaitRunInThread que $ getOptions file + awaitRunInThread que $ getOptions absFile -- | Run the specific cradle on a specific FilePath via hie-bios. -- This then builds dependencies or whatever based on the cradle, gets the From e7bd3d42045fb9680c23f995ff8b98c63a4772c8 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 8 Nov 2024 01:45:56 +0800 Subject: [PATCH 13/87] run session loader and worker in sperate --- ghcide/ghcide.cabal | 2 + .../session-loader/Development/IDE/Session.hs | 137 +++++++++++++----- .../Development/IDE/Session/OrderedSet.hs | 39 +++++ 3 files changed, 141 insertions(+), 37 deletions(-) create mode 100644 ghcide/session-loader/Development/IDE/Session/OrderedSet.hs diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index bf88a55ed3..81e33aa2fa 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -110,6 +110,7 @@ library , unliftio-core , unordered-containers >=0.2.10.0 , vector + , ListT if os(windows) build-depends: Win32 @@ -204,6 +205,7 @@ library Development.IDE.GHC.CPP Development.IDE.GHC.Warnings Development.IDE.Types.Action + Development.IDE.Session.OrderedSet if flag(pedantic) ghc-options: diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 57c9a73024..6cbf6ea370 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -25,6 +25,7 @@ import Control.Exception.Safe as Safe import Control.Monad import Control.Monad.Extra as Extra import Control.Monad.IO.Class +import Control.Monad.Trans.Maybe (MaybeT (MaybeT, runMaybeT)) import qualified Crypto.Hash.SHA1 as H import Data.Aeson hiding (Error) import Data.Bifunctor @@ -103,8 +104,7 @@ import qualified Data.HashSet as Set import qualified Data.Set as OS import Database.SQLite.Simple import Development.IDE.Core.Tracing (withTrace) -import Development.IDE.Core.WorkerThread (awaitRunInThread, - withWorkerQueue) +import Development.IDE.Core.WorkerThread (withWorkerQueue) import qualified 
Development.IDE.GHC.Compat.Util as Compat import Development.IDE.Session.Diagnostics (renderCradleError) import Development.IDE.Types.Shake (WithHieDb, @@ -119,12 +119,17 @@ import qualified System.Random as Random import System.Random (RandomGen) import Text.ParserCombinators.ReadP (readP_to_S) +import Control.Concurrent.STM (STM) +import qualified Control.Monad.STM as STM +import qualified Development.IDE.Session.OrderedSet as S +import qualified Focus import GHC.Data.Bag import GHC.Driver.Env (hsc_all_home_unit_ids) import GHC.Driver.Errors.Types import GHC.Types.Error (errMsgDiagnostic, singleMessage) import GHC.Unit.State +import qualified StmContainers.Map as STM data Log = LogSettingInitialDynFlags @@ -148,10 +153,14 @@ data Log | LogSessionLoadingChanged | LogSessionNewLoadedFiles ![FilePath] | LogSessionReloadOnError FilePath ![FilePath] + | LogGetOptionsLoop !FilePath + | LogGetSessionRetry !FilePath deriving instance Show Log instance Pretty Log where pretty = \case + LogGetSessionRetry path -> "Retrying get session for" <+> pretty path + LogGetOptionsLoop fp -> "Loop: getOptions for" <+> pretty fp LogSessionReloadOnError path files -> "Reloading file due to error in" <+> pretty path <+> "with files:" <+> pretty files LogSessionNewLoadedFiles files -> @@ -435,14 +444,14 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file hscEnvs <- newVar Map.empty :: IO (Var HieMap) -- Mapping from a Filepath to HscEnv - fileToFlags <- newVar Map.empty :: IO (Var FlagsMap) + fileToFlags <- STM.newIO :: IO FlagsMap -- Mapping from a Filepath to its 'hie.yaml' location. -- Should hold the same Filepaths as 'fileToFlags', otherwise -- they are inconsistent. So, everywhere you modify 'fileToFlags', -- you have to modify 'filesMap' as well. - filesMap <- newVar HM.empty :: IO (Var FilesMap) + filesMap <- STM.newIO :: IO FilesMap -- Pending files waiting to be loaded - pendingFilesTQueue <- newTQueueIO + pendingFileSet <- S.newIO :: IO (S.OrderedSet FilePath) -- Version of the mappings above version <- newVar 0 biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) @@ -559,7 +568,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) - -> IO ((IdeResult HscEnvEq,[FilePath]), HashSet FilePath) + -> IO ((IdeResult HscEnvEq,DependencyInfo), HashSet FilePath) session args@(hieYaml, _cfp, _opts, _libDir) = do (new_deps, old_deps) <- packageSetup args @@ -589,8 +598,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" ] - void $ modifyVar' fileToFlags $ Map.insert hieYaml this_flags_map - void $ modifyVar' filesMap $ flip HM.union (HM.fromList (map ((,hieYaml) . fst) $ concatMap toFlagsMap all_targets)) + let insertAll m xs = mapM_ (flip (uncurry STM.insert) m) xs + atomically $ do + STM.insert this_flags_map hieYaml fileToFlags + insertAll filesMap $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + -- Typecheck all files in the project on startup checkProject <- getCheckProject -- The VFS doesn't change on cradle edits, re-use the old one. 
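PATCH 13 replaces the Var-wrapped pure maps with stm-containers maps so that fileToFlags and filesMap can be updated inside a single STM transaction. A small sketch of that invariant, with the payload simplified to a list of files and `publishTargets` as a hypothetical stand-in for the hunk above:

import Control.Concurrent.STM (atomically)
import qualified StmContainers.Map as STM

publishTargets
  :: STM.Map (Maybe FilePath) [FilePath]   -- hie.yaml -> files it covers (simplified payload)
  -> STM.Map FilePath (Maybe FilePath)     -- file -> hie.yaml (the reverse index)
  -> Maybe FilePath
  -> [FilePath]
  -> IO ()
publishTargets flagsMap filesMap hieYaml files = atomically $ do
  -- stm-containers takes the value first, then the key, then the map
  STM.insert files hieYaml flagsMap
  mapM_ (\fp -> STM.insert hieYaml fp filesMap) files

Because both inserts commit in one transaction, a concurrent lookup can never observe a file present in one map but missing from the other, which is exactly the consistency the original fileToFlags/filesMap comment asks for.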
@@ -609,9 +621,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] - return $ (second Map.keys this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) + return $ (this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) - let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) + let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) consultCradle hieYaml cfp = do let lfpLog = makeRelative rootDir cfp logWith recorder Info $ LogCradlePath lfpLog @@ -625,7 +637,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" - pendingFiles <- Set.insert cfp . Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) + pendingFiles <- Set.insert cfp . Set.fromList <$> (atomically $ S.toUnOrderedList pendingFileSet) errorFiles <- readIORef error_loading_files old_files <- readIORef cradle_files -- if the file is in error loading files, we fall back to single loading mode @@ -652,18 +664,20 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ((runTime, _):_) | compileTime == runTime -> do (results, allNewLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) - -- put back to pending que if not listed in the results -- delete cfp even if we report No cradle target found for the cfp - let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` allNewLoaded let newLoaded = pendingFiles `Set.intersection` allNewLoaded - atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) + -- delete all new loaded + atomically $ forM_ allNewLoaded $ flip S.delete pendingFileSet -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded -- remove all new loaded file from error loading files atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) atomicModifyIORef' cradle_files (\xs -> (newLoaded <> xs,())) return results - | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) + | otherwise -> do + -- delete cfp from pending files + atomically $ S.delete cfp pendingFileSet + return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),Map.empty) -- Failure case, either a cradle error or the none cradle Left err -> do if (not $ null extraToLoads) @@ -676,18 +690,19 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` old_files atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files - atomically $ forM_ pendingFiles (writeTQueue pendingFilesTQueue) logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList pendingFiles) consultCradle hieYaml cfp else do - dep_info <- getDependencyInfo (maybeToList hieYaml) + dep_info <- getDependencyInfo ((maybeToList hieYaml) ++ concatMap cradleErrorDependencies err) let ncfp = toNormalizedFilePath' cfp let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - void $ modifyVar' fileToFlags $ - Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) - void $ modifyVar' filesMap $ HM.insert ncfp 
hieYaml + -- remove cfp from pending files + atomically $ S.delete cfp pendingFileSet + atomically $ do + STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp (res, dep_info))) hieYaml fileToFlags + STM.insert hieYaml ncfp filesMap atomicModifyIORef' error_loading_files (\xs -> (Set.insert cfp xs,())) - return (res, maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) + return (res, dep_info) let -- | We allow users to specify a loading strategy. @@ -710,21 +725,22 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- This caches the mapping from hie.yaml + Mod.hs -> [String] -- Returns the Ghc session and the cradle dependencies let sessionOpts :: (Maybe FilePath, FilePath) - -> IO (IdeResult HscEnvEq, [FilePath]) + -> IO (IdeResult HscEnvEq, DependencyInfo) sessionOpts (hieYaml, file) = do Extra.whenM didSessionLoadingPreferenceConfigChange $ do logWith recorder Info LogSessionLoadingChanged -- If the dependencies are out of date then clear both caches and start -- again. - modifyVar_ fileToFlags (const (return Map.empty)) - modifyVar_ filesMap (const (return HM.empty)) + atomically $ do + STM.reset filesMap + STM.reset fileToFlags -- Don't even keep the name cache, we start from scratch here! modifyVar_ hscEnvs (const (return Map.empty)) -- cleanup error loading files and cradle files atomicModifyIORef' error_loading_files (\_ -> (Set.empty,())) atomicModifyIORef' cradle_files (\_ -> (Set.empty,())) - v <- Map.findWithDefault HM.empty hieYaml <$> readVar fileToFlags + v <- atomically $ fromMaybe HM.empty <$> STM.lookup hieYaml fileToFlags case HM.lookup (toNormalizedFilePath' file) v of Just (opts, old_di) -> do deps_ok <- checkDependencyInfo old_di @@ -735,31 +751,77 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) -- If the dependencies are out of date then clear both caches and start -- again. - modifyVar_ fileToFlags (const (return Map.empty)) - modifyVar_ filesMap (const (return HM.empty)) + atomically $ do + STM.reset filesMap + STM.reset fileToFlags -- Keep the same name cache modifyVar_ hscEnvs (return . Map.adjust (const []) hieYaml ) consultCradle hieYaml file - else return (opts, Map.keys old_di) + else return (opts, old_di) Nothing -> consultCradle hieYaml file + let checkInCache ::NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) + checkInCache ncfp = runMaybeT $ do + cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp filesMap + m <- MaybeT $ STM.lookup cachedHieYamlLocation fileToFlags + MaybeT $ pure $ HM.lookup ncfp m + -- The main function which gets options for a file. We only want one of these running -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. 
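The `checkInCache` chain above composes two optional STM lookups and a final HashMap lookup. The same shape, stripped to a runnable fragment (keys simplified to FilePath, the flags payload left abstract, `lookupFlags` a hypothetical name):

import Control.Concurrent.STM (STM)
import Control.Monad.Trans.Maybe (MaybeT (..), runMaybeT)
import qualified Data.HashMap.Strict as HM
import qualified StmContainers.Map as STM

lookupFlags
  :: STM.Map FilePath (Maybe FilePath)                     -- file -> hie.yaml
  -> STM.Map (Maybe FilePath) (HM.HashMap FilePath flags)  -- hie.yaml -> per-file flags
  -> FilePath
  -> STM (Maybe flags)
lookupFlags filesMap flagsMap file = runMaybeT $ do
  hieYaml <- MaybeT $ STM.lookup file filesMap      -- which hie.yaml owns this file?
  perFile <- MaybeT $ STM.lookup hieYaml flagsMap   -- flags recorded for that hie.yaml
  MaybeT $ pure $ HM.lookup file perFile            -- flags recorded for this file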
- let getOptions :: FilePath -> IO (IdeResult HscEnvEq, [FilePath]) + let getOptions :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) getOptions file = do let ncfp = toNormalizedFilePath' file - cachedHieYamlLocation <- HM.lookup ncfp <$> readVar filesMap + cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap hieYaml <- cradleLoc file - sessionOpts (join cachedHieYamlLocation <|> hieYaml, file) `Safe.catch` \e -> - return (([renderPackageSetupException file e], Nothing), maybe [] pure hieYaml) - + let hieLoc = join cachedHieYamlLocation <|> hieYaml + result <- sessionOpts (hieLoc, file) `Safe.catch` \e -> do + dep <- getDependencyInfo $ maybe [] pure hieYaml + return (([renderPackageSetupException file e], Nothing), dep) + atomically $ STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp result)) hieLoc fileToFlags + return result + + let getOptionsLoop :: IO () + getOptionsLoop = do + -- Get the next file to load + absFile <- atomically $ S.readQueue pendingFileSet + logWith recorder Info (LogGetOptionsLoop absFile) + void $ getOptions absFile + getOptionsLoop + + let getSessionRetry :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) + getSessionRetry absFile = do + let ncfp = toNormalizedFilePath' absFile + -- check if in the cache + res <- atomically $ checkInCache ncfp + logWith recorder Info $ LogGetSessionRetry absFile + updateDateRes <- case res of + Just r -> do + depOk <- checkDependencyInfo (snd r) + if depOk + then return $ Just r + else return Nothing + _ -> return Nothing + case updateDateRes of + Just r -> return r + Nothing -> do + -- if not ok, we need to reload the session + atomically $ do + S.insert absFile pendingFileSet + atomically $ do + -- wait until pendingFiles is not in pendingFiles + Extra.whenM (S.lookup absFile pendingFileSet) STM.retry + getSessionRetry absFile + + -- Start the getOptionsLoop if the queue is empty + liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop returnWithVersion $ \file -> do let absFile = toAbsolutePath file - atomically $ writeTQueue pendingFilesTQueue absFile + second Map.keys <$> getSessionRetry absFile + -- atomically $ writeTQueue pendingFiles absFile -- see Note [Serializing runs in separate thread] - awaitRunInThread que $ getOptions absFile + -- awaitRunInThread que $ second Map.keys <$> getOptions absFile -- | Run the specific cradle on a specific FilePath via hie-bios. -- This then builds dependencies or whatever based on the cradle, gets the @@ -1034,10 +1096,11 @@ setCacheDirs recorder CacheDirs{..} dflags = do type DependencyInfo = Map.Map FilePath (Maybe UTCTime) type HieMap = Map.Map (Maybe FilePath) [RawComponentInfo] -- | Maps a "hie.yaml" location to all its Target Filepaths and options. -type FlagsMap = Map.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) +type FlagsMap = STM.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) -- | Maps a Filepath to its respective "hie.yaml" location. -- It aims to be the reverse of 'FlagsMap'. 
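The `lookupOrWaitCache` and `getOptionsLoop` split in PATCH 13 turns session loading into a request/wait protocol. A minimal model of the waiting side, assuming a plain stm-containers set as the pending set (`awaitLoaded` is hypothetical; the real code also re-checks dependency info and reads the cache after the wait):

import Control.Concurrent.STM
import Control.Monad (when)
import qualified StmContainers.Set as S

awaitLoaded :: S.Set FilePath -> FilePath -> IO ()
awaitLoaded pending file = do
  -- hand the file to the worker loop
  atomically $ S.insert file pending
  -- block until the worker removes it; retry re-runs the transaction
  -- whenever the pending set changes
  atomically $ do
    stillPending <- S.lookup file pending
    when stillPending retry

Once the transaction commits with the file absent, the worker has finished with it and the freshly populated cache can be consulted, which is what the surrounding code does immediately after this wait.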
-type FilesMap = HM.HashMap NormalizedFilePath (Maybe FilePath) +type FilesMap = STM.Map NormalizedFilePath (Maybe FilePath) + -- This is pristine information about a component data RawComponentInfo = RawComponentInfo diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs new file mode 100644 index 0000000000..e1a5f123c2 --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -0,0 +1,39 @@ +module Development.IDE.Session.OrderedSet where + +import Control.Concurrent.STM (STM, TQueue, newTQueueIO) +import Control.Concurrent.STM.TQueue (readTQueue, writeTQueue) +import Data.Hashable (Hashable) +import qualified ListT as LT +import qualified StmContainers.Set as S +import StmContainers.Set (Set) + + +type OrderedSet a = (TQueue a, Set a) + +insert :: Hashable a => a -> OrderedSet a -> STM () +insert a (que, s) = do + S.insert a s + writeTQueue que a + return () + +newIO :: Hashable a => IO (OrderedSet a) +newIO = do + que <- newTQueueIO + s <- S.newIO + return (que, s) + +readQueue :: Hashable a => OrderedSet a -> STM a +readQueue rs@(que, s) = do + f <- readTQueue que + b <- S.lookup f s + -- retry if the file is already in done + if b then return f else readQueue rs + +lookup :: Hashable a => a -> OrderedSet a -> STM Bool +lookup a (_, s) = S.lookup a s + +delete :: Hashable a => a -> OrderedSet a -> STM () +delete a (_, s) = S.delete a s + +toUnOrderedList :: Hashable a => OrderedSet a -> STM [a] +toUnOrderedList (_, s) = LT.toList $ S.listT s From 1f97c401b5aa5cc86c1e52d397bcd91154662a88 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 8 Nov 2024 02:26:23 +0800 Subject: [PATCH 14/87] cleanup --- .../session-loader/Development/IDE/Session.hs | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 6cbf6ea370..99ca786506 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -790,8 +790,12 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do void $ getOptions absFile getOptionsLoop - let getSessionRetry :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) - getSessionRetry absFile = do + -- | Given a file, this function will return the HscEnv and the dependencies + -- it would look up the cache first, if the cache is not available, it would + -- submit a request to the getOptionsLoop to get the options for the file + -- and wait until the options are available + let lookupOrWaitCache :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) + lookupOrWaitCache absFile = do let ncfp = toNormalizedFilePath' absFile -- check if in the cache res <- atomically $ checkInCache ncfp @@ -807,21 +811,17 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Just r -> return r Nothing -> do -- if not ok, we need to reload the session - atomically $ do - S.insert absFile pendingFileSet - atomically $ do - -- wait until pendingFiles is not in pendingFiles - Extra.whenM (S.lookup absFile pendingFileSet) STM.retry - getSessionRetry absFile + atomically $ S.insert absFile pendingFileSet + -- wait until pendingFiles is not in pendingFiles + atomically $ Extra.whenM (S.lookup absFile pendingFileSet) STM.retry + lookupOrWaitCache absFile + -- see Note [Serializing runs in separate thread] -- Start the getOptionsLoop if the queue is empty liftIO $ atomically $ 
Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop returnWithVersion $ \file -> do let absFile = toAbsolutePath file - second Map.keys <$> getSessionRetry absFile - -- atomically $ writeTQueue pendingFiles absFile - -- see Note [Serializing runs in separate thread] - -- awaitRunInThread que $ second Map.keys <$> getOptions absFile + second Map.keys <$> lookupOrWaitCache absFile -- | Run the specific cradle on a specific FilePath via hie-bios. -- This then builds dependencies or whatever based on the cradle, gets the From 4c998bd487e48dcf85abbb14cc58d217c5dafd6a Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 8 Nov 2024 15:24:17 +0800 Subject: [PATCH 15/87] rename LogGetSessionRetry to LogLookupSessionCache for clarity in logging --- ghcide/session-loader/Development/IDE/Session.hs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 99ca786506..c6d2dcbb84 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -154,12 +154,12 @@ data Log | LogSessionNewLoadedFiles ![FilePath] | LogSessionReloadOnError FilePath ![FilePath] | LogGetOptionsLoop !FilePath - | LogGetSessionRetry !FilePath + | LogLookupSessionCache !FilePath deriving instance Show Log instance Pretty Log where pretty = \case - LogGetSessionRetry path -> "Retrying get session for" <+> pretty path + LogLookupSessionCache path -> "Looking up session cache for" <+> pretty path LogGetOptionsLoop fp -> "Loop: getOptions for" <+> pretty fp LogSessionReloadOnError path files -> "Reloading file due to error in" <+> pretty path <+> "with files:" <+> pretty files @@ -799,7 +799,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let ncfp = toNormalizedFilePath' absFile -- check if in the cache res <- atomically $ checkInCache ncfp - logWith recorder Info $ LogGetSessionRetry absFile + logWith recorder Info $ LogLookupSessionCache absFile updateDateRes <- case res of Just r -> do depOk <- checkDependencyInfo (snd r) From 79a43a0cbfa32a226c831a7eb9da0279d9049ab7 Mon Sep 17 00:00:00 2001 From: Patrick Date: Sat, 9 Nov 2024 18:34:13 +0800 Subject: [PATCH 16/87] extract attempt to load files from errors --- cabal.project | 5 +++++ ghcide/session-loader/Development/IDE/Session.hs | 8 +++++--- .../session-loader/Development/IDE/Session/Diagnostics.hs | 2 +- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/cabal.project b/cabal.project index 2c872ed46f..2b46365f1f 100644 --- a/cabal.project +++ b/cabal.project @@ -46,3 +46,8 @@ constraints: if impl(ghc >= 9.9) -- https://github.com/haskell/haskell-language-server/issues/4324 benchmarks: False + +source-repository-package + type: git + location: https://github.com/soulomoon/hie-bios.git + tag: 93582c21372af573e5103bad198777a3317a2df2 diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index c6d2dcbb84..f3bbc4d899 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -680,17 +680,19 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),Map.empty) -- Failure case, either a cradle error or the none cradle Left err -> do - if (not $ null extraToLoads) + let attemptToLoadFiles = (Set.delete cfp $ Set.fromList $ concatMap 
cradleErrorLoadingFiles err) + `Set.difference` old_files + if (not $ null attemptToLoadFiles) then do -- mark as less loaded files as failedLoadingFiles as possible -- limitation is that when we are loading files, and the dependencies of old_files -- are changed, and old_files are not valid anymore. -- but they will still be in the old_files, and will not move to error_loading_files. -- And make other files failed to load in batch mode. - let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` old_files + let failedLoadingFiles = (Set.insert cfp attemptToLoadFiles) atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files - logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList pendingFiles) + logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) consultCradle hieYaml cfp else do dep_info <- getDependencyInfo ((maybeToList hieYaml) ++ concatMap cradleErrorDependencies err) diff --git a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs index a8e35e5965..8b1136c0c8 100644 --- a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs +++ b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs @@ -27,7 +27,7 @@ data CradleErrorDetails = Depicts the cradle error in a user-friendly way. -} renderCradleError :: CradleError -> Cradle a -> NormalizedFilePath -> FileDiagnostic -renderCradleError (CradleError deps _ec ms) cradle nfp +renderCradleError (CradleError deps _ec ms _attemptToLoadFiles) cradle nfp | HieBios.isCabalCradle cradle = let (fp, showDiag, diag) = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp $ T.unlines $ map T.pack userFriendlyMessage in (fp, showDiag, diag{_data_ = Just $ Aeson.toJSON CradleErrorDetails{cabalProjectFiles=absDeps}}) From beb1764608b01d8e659ce38ad914474f98880f50 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 12 Nov 2024 19:53:35 +0800 Subject: [PATCH 17/87] refactor session loading to wait for pending files before cache check --- ghcide/session-loader/Development/IDE/Session.hs | 9 +++++---- .../session-loader/Development/IDE/Session/OrderedSet.hs | 7 ++++--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index f3bbc4d899..c47cb7b381 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -799,8 +799,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let lookupOrWaitCache :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) lookupOrWaitCache absFile = do let ncfp = toNormalizedFilePath' absFile - -- check if in the cache - res <- atomically $ checkInCache ncfp + res <- atomically $ do + -- wait until target file is not in pendingFiles + Extra.whenM (S.lookup absFile pendingFileSet) STM.retry + -- check if in the cache + checkInCache ncfp logWith recorder Info $ LogLookupSessionCache absFile updateDateRes <- case res of Just r -> do @@ -814,8 +817,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Nothing -> do -- if not ok, we need to reload the session atomically $ S.insert absFile pendingFileSet - -- wait until pendingFiles is not in pendingFiles - atomically $ Extra.whenM (S.lookup absFile pendingFileSet) STM.retry lookupOrWaitCache absFile -- see Note [Serializing runs in separate thread] diff --git 
a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index e1a5f123c2..ff67abd8b1 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -2,7 +2,9 @@ module Development.IDE.Session.OrderedSet where import Control.Concurrent.STM (STM, TQueue, newTQueueIO) import Control.Concurrent.STM.TQueue (readTQueue, writeTQueue) +import Control.Monad (when) import Data.Hashable (Hashable) +import qualified Focus import qualified ListT as LT import qualified StmContainers.Set as S import StmContainers.Set (Set) @@ -12,9 +14,8 @@ type OrderedSet a = (TQueue a, Set a) insert :: Hashable a => a -> OrderedSet a -> STM () insert a (que, s) = do - S.insert a s - writeTQueue que a - return () + (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s + when inserted $ writeTQueue que a newIO :: Hashable a => IO (OrderedSet a) newIO = do From 61395222f11eb3c1751daf437b936f41ef712961 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 19 Nov 2024 05:18:13 +0800 Subject: [PATCH 18/87] add LogTime to logging for improved time tracking during session loading --- .../session-loader/Development/IDE/Session.hs | 58 +++++++++++-------- 1 file changed, 33 insertions(+), 25 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index c47cb7b381..2b75329c1b 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -155,10 +155,12 @@ data Log | LogSessionReloadOnError FilePath ![FilePath] | LogGetOptionsLoop !FilePath | LogLookupSessionCache !FilePath + | LogTime !String deriving instance Show Log instance Pretty Log where pretty = \case + LogTime s -> "Time:" <+> pretty s LogLookupSessionCache path -> "Looking up session cache for" <+> pretty path LogGetOptionsLoop fp -> "Loop: getOptions for" <+> pretty fp LogSessionReloadOnError path files -> @@ -582,7 +584,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') all_targets' = concat all_target_details - newLoaded = HM.keys flags_map' this_dep_info <- getDependencyInfo $ maybeToList hieYaml let (all_targets, this_flags_map, this_options) = case HM.lookup _cfp flags_map' of @@ -599,9 +600,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ] let insertAll m xs = mapM_ (flip (uncurry STM.insert) m) xs + newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map atomically $ do STM.insert this_flags_map hieYaml fileToFlags insertAll filesMap $ map ((hieYaml,) . 
fst) $ concatMap toFlagsMap all_targets + forM_ newLoaded $ flip S.delete pendingFileSet -- Typecheck all files in the project on startup checkProject <- getCheckProject @@ -621,9 +624,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] - return $ (this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) + return $ (this_options, newLoaded) - let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) + let consultCradle :: Maybe FilePath -> FilePath -> IO () consultCradle hieYaml cfp = do let lfpLog = makeRelative rootDir cfp logWith recorder Info $ LogCradlePath lfpLog @@ -658,32 +661,36 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- The cradle gave us some options so get to work turning them -- into and HscEnv. Right (opts, libDir, version) -> do + let ncfp = toNormalizedFilePath' cfp let compileTime = fullCompilerVersion case reverse $ readP_to_S parseVersion version of [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - (results, allNewLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) - -- delete cfp even if we report No cradle target found for the cfp + (_results, allNewLoaded) <- session (hieYaml, ncfp, opts, libDir) let newLoaded = pendingFiles `Set.intersection` allNewLoaded - -- delete all new loaded - atomically $ forM_ allNewLoaded $ flip S.delete pendingFileSet -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded -- remove all new loaded file from error loading files atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) atomicModifyIORef' cradle_files (\xs -> (newLoaded <> xs,())) - return results | otherwise -> do -- delete cfp from pending files - atomically $ S.delete cfp pendingFileSet - return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),Map.empty) + atomically $ do + STM.focus (Focus.insertOrMerge HM.union + (HM.singleton ncfp (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing), mempty))) + hieYaml fileToFlags + STM.insert hieYaml ncfp filesMap + S.delete cfp pendingFileSet -- Failure case, either a cradle error or the none cradle Left err -> do let attemptToLoadFiles = (Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err) `Set.difference` old_files if (not $ null attemptToLoadFiles) + then do + -- we are loading more files and failed, we need to retry + -- mark as less loaded files as failedLoadingFiles as possible -- limitation is that when we are loading files, and the dependencies of old_files -- are changed, and old_files are not valid anymore. 
@@ -695,16 +702,16 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) consultCradle hieYaml cfp else do + -- we are only loading this file and it failed dep_info <- getDependencyInfo ((maybeToList hieYaml) ++ concatMap cradleErrorDependencies err) let ncfp = toNormalizedFilePath' cfp let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) -- remove cfp from pending files - atomically $ S.delete cfp pendingFileSet + atomicModifyIORef' error_loading_files (\xs -> (Set.insert cfp xs,())) atomically $ do STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp (res, dep_info))) hieYaml fileToFlags - STM.insert hieYaml ncfp filesMap - atomicModifyIORef' error_loading_files (\xs -> (Set.insert cfp xs,())) - return (res, dep_info) + STM.insert hieYaml ncfp filesMap + S.delete cfp pendingFileSet let -- | We allow users to specify a loading strategy. @@ -727,7 +734,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- This caches the mapping from hie.yaml + Mod.hs -> [String] -- Returns the Ghc session and the cradle dependencies let sessionOpts :: (Maybe FilePath, FilePath) - -> IO (IdeResult HscEnvEq, DependencyInfo) + -> IO () sessionOpts (hieYaml, file) = do Extra.whenM didSessionLoadingPreferenceConfigChange $ do logWith recorder Info LogSessionLoadingChanged @@ -744,10 +751,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do v <- atomically $ fromMaybe HM.empty <$> STM.lookup hieYaml fileToFlags case HM.lookup (toNormalizedFilePath' file) v of - Just (opts, old_di) -> do + Just (_opts, old_di) -> do deps_ok <- checkDependencyInfo old_di - if not deps_ok - then do + when (not deps_ok) $ do -- if deps are old, we can try to load the error files again atomicModifyIORef' error_loading_files (\xs -> (Set.delete file xs,())) atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) @@ -759,7 +765,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- Keep the same name cache modifyVar_ hscEnvs (return . Map.adjust (const []) hieYaml ) consultCradle hieYaml file - else return (opts, old_di) Nothing -> consultCradle hieYaml file let checkInCache ::NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) @@ -772,24 +777,27 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. 
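
The comment above states the key constraint for this part of the loader: only one cradle load may be in flight at a time, and later requests wait for the current one to finish. As a minimal, self-contained sketch of that pattern — not the actual ghcide code; the FilePath queue and the putStrLn stand-in for the real load are illustrative only:

module Main (main) where

import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.STM
import Control.Monad (forM_, forever)

main :: IO ()
main = do
  -- requests arrive from many threads, but a single worker drains them,
  -- so at most one "load" runs at any moment and arrival order is kept
  que <- newTQueueIO :: IO (TQueue FilePath)
  _ <- forkIO $ forever $ do
    file <- atomically (readTQueue que)
    putStrLn ("loading session for " ++ file) -- stand-in for the real load
  forM_ ["A.hs", "B.hs", "C.hs"] $ \f -> atomically (writeTQueue que f)
  threadDelay 100000 -- give the worker time to drain the queue in this demo

The getOptionsLoop introduced in these patches follows the same shape, with the OrderedSet of pending files playing the role of the plain TQueue here.
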
- let getOptions :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) + let getOptions :: FilePath -> IO () getOptions file = do let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap hieYaml <- cradleLoc file let hieLoc = join cachedHieYamlLocation <|> hieYaml - result <- sessionOpts (hieLoc, file) `Safe.catch` \e -> do + sessionOpts (hieLoc, file) `Safe.catch` \e -> do dep <- getDependencyInfo $ maybe [] pure hieYaml - return (([renderPackageSetupException file e], Nothing), dep) - atomically $ STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp result)) hieLoc fileToFlags - return result + let errorResult = (([renderPackageSetupException file e], Nothing), dep) + atomically $ do + STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp errorResult)) hieLoc fileToFlags + STM.insert hieYaml ncfp filesMap + -- delete file from pending files + S.delete file pendingFileSet let getOptionsLoop :: IO () getOptionsLoop = do -- Get the next file to load absFile <- atomically $ S.readQueue pendingFileSet logWith recorder Info (LogGetOptionsLoop absFile) - void $ getOptions absFile + getOptions absFile getOptionsLoop -- | Given a file, this function will return the HscEnv and the dependencies From 73145097fbff80f27d7d8d6411411a96de97bf22 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 19 Nov 2024 18:13:49 +0800 Subject: [PATCH 19/87] refactor session loading to handle dependency checks more clearly --- ghcide/session-loader/Development/IDE/Session.hs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 2b75329c1b..02f3988f29 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -753,7 +753,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do case HM.lookup (toNormalizedFilePath' file) v of Just (_opts, old_di) -> do deps_ok <- checkDependencyInfo old_di - when (not deps_ok) $ do + if (not deps_ok) + then do -- if deps are old, we can try to load the error files again atomicModifyIORef' error_loading_files (\xs -> (Set.delete file xs,())) atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) @@ -765,6 +766,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- Keep the same name cache modifyVar_ hscEnvs (return . 
Map.adjust (const []) hieYaml ) consultCradle hieYaml file + -- if deps are ok, we can just remove the file from pending files + else atomically $ S.delete file pendingFileSet Nothing -> consultCradle hieYaml file let checkInCache ::NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) From cddcc55b9bbe40659ba5e7f25a3584ce20c41c8a Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 20 Nov 2024 07:14:55 +0800 Subject: [PATCH 20/87] Refactors session loading logic Renames getOptions to getOptionsWorker for clarity Removes redundant getOptionsLoop function Ensures session loading is called under the same `Action` context --- ghcide/session-loader/Development/IDE/Session.hs | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 02f3988f29..74eabcc021 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -780,8 +780,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. - let getOptions :: FilePath -> IO () - getOptions file = do + let getOptionsWorker :: FilePath -> IO () + getOptionsWorker file = do + logWith recorder Info (LogGetOptionsLoop file) let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap hieYaml <- cradleLoc file @@ -795,14 +796,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- delete file from pending files S.delete file pendingFileSet - let getOptionsLoop :: IO () - getOptionsLoop = do - -- Get the next file to load - absFile <- atomically $ S.readQueue pendingFileSet - logWith recorder Info (LogGetOptionsLoop absFile) - getOptions absFile - getOptionsLoop - -- | Given a file, this function will return the HscEnv and the dependencies -- it would look up the cache first, if the cache is not available, it would -- submit a request to the getOptionsLoop to get the options for the file @@ -828,11 +821,12 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Nothing -> do -- if not ok, we need to reload the session atomically $ S.insert absFile pendingFileSet + -- line up the session to load + atomically $ writeTQueue que (getOptionsWorker absFile) lookupOrWaitCache absFile -- see Note [Serializing runs in separate thread] -- Start the getOptionsLoop if the queue is empty - liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop returnWithVersion $ \file -> do let absFile = toAbsolutePath file second Map.keys <$> lookupOrWaitCache absFile From bb78a36f473aa7439203d6e33e71d2b3a9a7fada Mon Sep 17 00:00:00 2001 From: Patrick Date: Sat, 7 Dec 2024 03:52:07 +0800 Subject: [PATCH 21/87] delay the restart --- .../session-loader/Development/IDE/Session.hs | 39 ++++++++++--------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 74eabcc021..9b31bb0188 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -570,7 +570,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let session :: (Maybe FilePath, NormalizedFilePath, 
ComponentOptions, FilePath) - -> IO ((IdeResult HscEnvEq,DependencyInfo), HashSet FilePath) + -> IO ((IdeResult HscEnvEq,DependencyInfo), HashSet FilePath, IO ()) session args@(hieYaml, _cfp, _opts, _libDir) = do (new_deps, old_deps) <- packageSetup args @@ -610,21 +610,21 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do checkProject <- getCheckProject -- The VFS doesn't change on cradle edits, re-use the old one. -- Invalidate all the existing GhcSession build nodes by restarting the Shake session - restartShakeSession VFSUnmodified "new component" [] $ do - keys2 <- invalidateShakeCache - keys1 <- extendKnownTargets all_targets - unless (null new_deps || not checkProject) $ do - cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets) - void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do - mmt <- uses GetModificationTime cfps' - let cs_exist = catMaybes (zipWith (<$) cfps' mmt) - modIfaces <- uses GetModIface cs_exist - -- update exports map - shakeExtras <- getShakeExtras - let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces - liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) - return [keys1, keys2] - return $ (this_options, newLoaded) + let restart = restartShakeSession VFSUnmodified "new component" [] $ do + keys2 <- invalidateShakeCache + keys1 <- extendKnownTargets all_targets + unless (null new_deps || not checkProject) $ do + cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets) + void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do + mmt <- uses GetModificationTime cfps' + let cs_exist = catMaybes (zipWith (<$) cfps' mmt) + modIfaces <- uses GetModIface cs_exist + -- update exports map + shakeExtras <- getShakeExtras + let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces + liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) + return [keys1, keys2] + return (this_options, newLoaded, restart) let consultCradle :: Maybe FilePath -> FilePath -> IO () consultCradle hieYaml cfp = do @@ -667,13 +667,14 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - (_results, allNewLoaded) <- session (hieYaml, ncfp, opts, libDir) + (_results, allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) let newLoaded = pendingFiles `Set.intersection` allNewLoaded -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded -- remove all new loaded file from error loading files atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) atomicModifyIORef' cradle_files (\xs -> (newLoaded <> xs,())) + restart | otherwise -> do -- delete cfp from pending files atomically $ do @@ -782,7 +783,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- before attempting to do so. 
let getOptionsWorker :: FilePath -> IO () getOptionsWorker file = do - logWith recorder Info (LogGetOptionsLoop file) + logWith recorder Debug (LogGetOptionsLoop file) let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap hieYaml <- cradleLoc file @@ -808,7 +809,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Extra.whenM (S.lookup absFile pendingFileSet) STM.retry -- check if in the cache checkInCache ncfp - logWith recorder Info $ LogLookupSessionCache absFile + logWith recorder Debug $ LogLookupSessionCache absFile updateDateRes <- case res of Just r -> do depOk <- checkDependencyInfo (snd r) From 58ec7eac149a4504d4084519a97bb3ffa255595b Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 18 Feb 2025 05:16:01 +0800 Subject: [PATCH 22/87] re-inline the old file instead of loading it twice --- ghcide/session-loader/Development/IDE/Session.hs | 16 +++++++++++----- .../Development/IDE/Session/OrderedSet.hs | 14 +++++++++++--- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 1b19561c54..3ec7db2e6c 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -772,9 +772,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. - let getOptionsWorker :: FilePath -> IO () - getOptionsWorker file = do - logWith recorder Debug (LogGetOptionsLoop file) + let getOptions :: FilePath -> IO () + getOptions file = do let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap hieYaml <- cradleLoc file @@ -788,6 +787,14 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- delete file from pending files S.delete file pendingFileSet + let getOptionsLoop :: IO () + getOptionsLoop = do + -- Get the next file to load + absFile <- atomically $ S.readQueue pendingFileSet + logWith recorder Debug (LogGetOptionsLoop absFile) + getOptions absFile + getOptionsLoop + -- | Given a file, this function will return the HscEnv and the dependencies -- it would look up the cache first, if the cache is not available, it would -- submit a request to the getOptionsLoop to get the options for the file @@ -813,12 +820,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Nothing -> do -- if not ok, we need to reload the session atomically $ S.insert absFile pendingFileSet - -- line up the session to load - atomically $ writeTQueue que (getOptionsWorker absFile) lookupOrWaitCache absFile -- see Note [Serializing runs in separate thread] -- Start the getOptionsLoop if the queue is empty + liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop returnWithVersion $ \file -> do let absFile = toAbsolutePath file second Map.keys <$> lookupOrWaitCache absFile diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index ff67abd8b1..a2b0a76565 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -1,6 +1,7 @@ module Development.IDE.Session.OrderedSet where -import 
Control.Concurrent.STM (STM, TQueue, newTQueueIO) +import Control.Concurrent.STM (STM, TQueue, flushTQueue, + newTQueueIO) import Control.Concurrent.STM.TQueue (readTQueue, writeTQueue) import Control.Monad (when) import Data.Hashable (Hashable) @@ -15,7 +16,14 @@ type OrderedSet a = (TQueue a, Set a) insert :: Hashable a => a -> OrderedSet a -> STM () insert a (que, s) = do (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s - when inserted $ writeTQueue que a + -- if already in the set + -- update the position of the element in the queue + when (not inserted) $ do + items <- filter (==a) <$> flushTQueue que + mapM_ (writeTQueue que) items + return () + writeTQueue que a + -- when que $ writeTQueue que a newIO :: Hashable a => IO (OrderedSet a) newIO = do @@ -27,7 +35,7 @@ readQueue :: Hashable a => OrderedSet a -> STM a readQueue rs@(que, s) = do f <- readTQueue que b <- S.lookup f s - -- retry if the file is already in done + -- retry if no files are left in the queue if b then return f else readQueue rs lookup :: Hashable a => a -> OrderedSet a -> STM Bool From d9439637ec7f5ccee4491bb9fb396aef4d3a44f3 Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 19 Feb 2025 01:05:49 +0800 Subject: [PATCH 23/87] update upload artifact action version --- .github/workflows/bench.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 659352e4e6..b9d6d49059 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -62,7 +62,7 @@ jobs: # change of the strategy may require changing the bootstrapping/run code steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: # By default, the `pull_request` event has a `GITHUB_SHA` env variable @@ -101,14 +101,14 @@ jobs: tar -czf cabal.tar.gz * - name: Upload workspace - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 path: workspace.tar.gz - name: Upload .cabal - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 @@ -134,13 +134,13 @@ jobs: enable-stack: false - name: Download cabal home - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} path: . - name: Download workspace - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} path: . 
@@ -165,7 +165,7 @@ jobs: run: find bench-results -name "*.csv" -or -name "*.svg" -or -name "*.html" | xargs tar -czf benchmark-artifacts.tar.gz - name: Archive benchmarking artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: bench-results-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-artifacts.tar.gz @@ -175,7 +175,7 @@ jobs: run: find bench-results -name "*.log" -or -name "*.hp" | xargs tar -czf benchmark-logs.tar.gz - name: Archive benchmark logs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: bench-logs-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-logs.tar.gz From 5d015001976a1562b0f9ca612ab44a494656c081 Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 26 Feb 2025 05:01:54 +0800 Subject: [PATCH 24/87] update hie-bios tag --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index 2e32d6715f..b45f1ba86d 100644 --- a/cabal.project +++ b/cabal.project @@ -66,4 +66,4 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) source-repository-package type: git location: https://github.com/soulomoon/hie-bios.git - tag: 93582c21372af573e5103bad198777a3317a2df2 \ No newline at end of file + tag: 84febb04ba152b03fd42b551ffb2ea6e7506cf9b From 10a6f7e7c69dfc150aff34e0c6cebc37a127eca6 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 27 Feb 2025 05:17:54 +0800 Subject: [PATCH 25/87] Update hie-bios tag to latest commit --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index b45f1ba86d..54c46e6eca 100644 --- a/cabal.project +++ b/cabal.project @@ -66,4 +66,4 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) source-repository-package type: git location: https://github.com/soulomoon/hie-bios.git - tag: 84febb04ba152b03fd42b551ffb2ea6e7506cf9b + tag: 3351cfc5becee6a09df47df4772598fb2207b746 From 45b124137d7111274bcb06470ed855bb0377b8eb Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 3 Mar 2025 00:18:43 +0800 Subject: [PATCH 26/87] update hie-bios --- cabal.project | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cabal.project b/cabal.project index 54c46e6eca..794ccb4fb2 100644 --- a/cabal.project +++ b/cabal.project @@ -65,5 +65,5 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) source-repository-package type: git - location: https://github.com/soulomoon/hie-bios.git - tag: 3351cfc5becee6a09df47df4772598fb2207b746 + location: https://github.com/haskell/hie-bios + tag: bc502c94b891719f07e5ada9f6f59ca4ba8e08ff From 219db463049bfac6408f6cde3f9a0b3262a9e059 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 3 Mar 2025 01:19:42 +0800 Subject: [PATCH 27/87] update index-state to reflect the latest project state --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index 794ccb4fb2..4c9d2b25e0 100644 --- a/cabal.project +++ b/cabal.project @@ -8,7 +8,7 @@ packages: ./hls-test-utils -index-state: 2024-12-02T00:00:00Z +index-state: 2025-03-02T16:10:12Z tests: True test-show-details: direct From 2f86db0b0abd5f86af42ec96a0b2e7f4da077a1f Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 4 Mar 2025 00:13:35 +0800 Subject: [PATCH 28/87] update index-state to reflect the new date --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index 4c9d2b25e0..794ccb4fb2 100644 --- a/cabal.project +++ b/cabal.project @@ -8,7 +8,7 @@ packages: 
./hls-test-utils -index-state: 2025-03-02T16:10:12Z +index-state: 2024-12-02T00:00:00Z tests: True test-show-details: direct From de98232569ab0104b5ad34a7fbf1e49316e63375 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:08:57 +0800 Subject: [PATCH 29/87] update fourmolu dependency version constraints --- haskell-language-server.cabal | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index dcbb546733..eece96f992 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -1497,7 +1497,7 @@ library hls-fourmolu-plugin build-depends: , base >=4.12 && <5 , filepath - , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16 + , fourmolu ^>= 0.14 || ^>= 0.15 || >= 0.16 && < 0.16.2 , ghc-boot-th , ghcide == 2.9.0.1 , hls-plugin-api == 2.9.0.1 From f0a881d1a5b24ca648230e53ef63d28c14524a49 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:23:24 +0800 Subject: [PATCH 30/87] remove ListT from library dependencies --- ghcide/ghcide.cabal | 1 - 1 file changed, 1 deletion(-) diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 3b88a2024c..1468128d9a 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -111,7 +111,6 @@ library , unliftio-core , unordered-containers >=0.2.10.0 , vector - , ListT if os(windows) build-depends: Win32 From 14f6a3b93a693e1daf141a2f9172c32509fcb166 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:23:46 +0800 Subject: [PATCH 31/87] update hie-bios to a new tag --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index 794ccb4fb2..66fa8a3ff8 100644 --- a/cabal.project +++ b/cabal.project @@ -66,4 +66,4 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) source-repository-package type: git location: https://github.com/haskell/hie-bios - tag: bc502c94b891719f07e5ada9f6f59ca4ba8e08ff + tag: 65df091a4464b43043661d70c1dcfed1ed1fa9a9 From 8b0e246a9be0f513813ac8728801ecd3b3a81873 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:24:01 +0800 Subject: [PATCH 32/87] update fourmolu dependency version constraints to allow 0.16 --- haskell-language-server.cabal | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index eece96f992..dcbb546733 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -1497,7 +1497,7 @@ library hls-fourmolu-plugin build-depends: , base >=4.12 && <5 , filepath - , fourmolu ^>= 0.14 || ^>= 0.15 || >= 0.16 && < 0.16.2 + , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16 , ghc-boot-th , ghcide == 2.9.0.1 , hls-plugin-api == 2.9.0.1 From 2dd71c00f30032d30e339f250c6c334e5a978f29 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:34:50 +0800 Subject: [PATCH 33/87] add allow-newer constraint for Cabal-syntax --- cabal.project | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cabal.project b/cabal.project index 66fa8a3ff8..f46df91127 100644 --- a/cabal.project +++ b/cabal.project @@ -63,6 +63,8 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) constraints: ghc-lib-parser==9.8.4.20241130 +allow-newer: + Cabal-syntax source-repository-package type: git location: https://github.com/haskell/hie-bios From b8406d60b991a8d92435405579a77ff6f80bbf5b Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:37:53 +0800 Subject: [PATCH 34/87] remove allow-newer constraint for Cabal-syntax --- cabal.project | 2 -- 1 file changed, 2 deletions(-) diff --git 
a/cabal.project b/cabal.project index f46df91127..66fa8a3ff8 100644 --- a/cabal.project +++ b/cabal.project @@ -63,8 +63,6 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) constraints: ghc-lib-parser==9.8.4.20241130 -allow-newer: - Cabal-syntax source-repository-package type: git location: https://github.com/haskell/hie-bios From 5ea3d87b8ef711949a4fb73eab2472bc21dc19cd Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:50:27 +0800 Subject: [PATCH 35/87] bump actions/checkout and actions/upload-artifact to v3 --- .github/workflows/bench.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index b9d6d49059..659352e4e6 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -62,7 +62,7 @@ jobs: # change of the strategy may require changing the bootstrapping/run code steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 with: # By default, the `pull_request` event has a `GITHUB_SHA` env variable @@ -101,14 +101,14 @@ jobs: tar -czf cabal.tar.gz * - name: Upload workspace - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 path: workspace.tar.gz - name: Upload .cabal - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 @@ -134,13 +134,13 @@ jobs: enable-stack: false - name: Download cabal home - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} path: . - name: Download workspace - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} path: . @@ -165,7 +165,7 @@ jobs: run: find bench-results -name "*.csv" -or -name "*.svg" -or -name "*.html" | xargs tar -czf benchmark-artifacts.tar.gz - name: Archive benchmarking artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: bench-results-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-artifacts.tar.gz @@ -175,7 +175,7 @@ jobs: run: find bench-results -name "*.log" -or -name "*.hp" | xargs tar -czf benchmark-logs.tar.gz - name: Archive benchmark logs - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: bench-logs-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-logs.tar.gz From 3e0c27b1de952c84cd4f8598907be79d0be4d735 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:53:05 +0800 Subject: [PATCH 36/87] Revert "bump actions/checkout and actions/upload-artifact to v3" This reverts commit 5ea3d87b8ef711949a4fb73eab2472bc21dc19cd. 
--- .github/workflows/bench.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 659352e4e6..b9d6d49059 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -62,7 +62,7 @@ jobs: # change of the strategy may require changing the bootstrapping/run code steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: # By default, the `pull_request` event has a `GITHUB_SHA` env variable @@ -101,14 +101,14 @@ jobs: tar -czf cabal.tar.gz * - name: Upload workspace - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 path: workspace.tar.gz - name: Upload .cabal - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 @@ -134,13 +134,13 @@ jobs: enable-stack: false - name: Download cabal home - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} path: . - name: Download workspace - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} path: . @@ -165,7 +165,7 @@ jobs: run: find bench-results -name "*.csv" -or -name "*.svg" -or -name "*.html" | xargs tar -czf benchmark-artifacts.tar.gz - name: Archive benchmarking artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: bench-results-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-artifacts.tar.gz @@ -175,7 +175,7 @@ jobs: run: find bench-results -name "*.log" -or -name "*.hp" | xargs tar -czf benchmark-logs.tar.gz - name: Archive benchmark logs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: bench-logs-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-logs.tar.gz From 8c27e3479bd4ba7fd699ba76396a8f4419d60ce6 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 22:21:37 +0800 Subject: [PATCH 37/87] add allow-older constraint for optparse-applicative in cabal.project --- cabal.project | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cabal.project b/cabal.project index 66fa8a3ff8..efc8e3a895 100644 --- a/cabal.project +++ b/cabal.project @@ -67,3 +67,5 @@ source-repository-package type: git location: https://github.com/haskell/hie-bios tag: 65df091a4464b43043661d70c1dcfed1ed1fa9a9 +-- see https://github.com/haskell/haskell-language-server/pull/4445#issuecomment-2705787655 +allow-older: hie-bios:optparse-applicative From b0af63434ea35f4c40911a630f043c4eb51215cf Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 24 Apr 2025 19:03:06 +0800 Subject: [PATCH 38/87] update hie-bios --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index c69496e295..e54c7f4bfe 100644 --- a/cabal.project +++ b/cabal.project @@ -56,7 +56,7 @@ allow-newer: source-repository-package type: git location: https://github.com/haskell/hie-bios - tag: 65df091a4464b43043661d70c1dcfed1ed1fa9a9 + tag: e372a62b780b1314a35238a698a9e3813096b122 -- see https://github.com/haskell/haskell-language-server/pull/4445#issuecomment-2705787655 allow-older: hie-bios:optparse-applicative From 06fa5de52bd4b10e28bab42c9c076f409b356d43 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sun, 27 Apr 2025 16:19:26 +0800 Subject: [PATCH 39/87] introduce SessionState --- 
.../session-loader/Development/IDE/Session.hs | 259 ++++++++++++------ 1 file changed, 173 insertions(+), 86 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index fd50fa5bc0..76d10c9d66 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -418,6 +418,125 @@ getHieDbLoc dir = do createDirectoryIfMissing True cDir pure (cDir db) +data SessionState = SessionState + { cradle_files :: !(IORef (HashSet FilePath)) + , error_loading_files :: !(IORef (HashSet FilePath)) + , hscEnvs :: !(Var HieMap) + , fileToFlags :: !(STM.Map (Maybe FilePath) (HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo))) + , filesMap :: !(STM.Map NormalizedFilePath (Maybe FilePath)) + , pendingFileSet :: !(S.OrderedSet FilePath) + , version :: !(Var Int) + } + +-- | Helper functions for SessionState management +-- These functions encapsulate common operations on the SessionState + +-- | Add a file to the set of files with errors during loading +addErrorLoadingFile :: SessionState -> FilePath -> IO () +addErrorLoadingFile state file = + atomicModifyIORef' (error_loading_files state) (\xs -> (Set.insert file xs, ())) + +addErrorLoadingFiles :: SessionState -> [FilePath] -> IO () +addErrorLoadingFiles = mapM_ . addErrorLoadingFile + +-- | Remove a file from the set of files with errors during loading +removeErrorLoadingFile :: SessionState -> FilePath -> IO () +removeErrorLoadingFile state file = + atomicModifyIORef' (error_loading_files state) (\xs -> (Set.delete file xs, ())) + +addCradleFiles :: SessionState -> HashSet FilePath -> IO () +addCradleFiles state files = + atomicModifyIORef' (cradle_files state) (\xs -> (files <> xs, ())) + +-- | Remove a file from the cradle files set +removeCradleFile :: SessionState -> FilePath -> IO () +removeCradleFile state file = + atomicModifyIORef' (cradle_files state) (\xs -> (Set.delete file xs, ())) + +-- | Clear error loading files and reset to empty set +clearErrorLoadingFiles :: SessionState -> IO () +clearErrorLoadingFiles state = + atomicModifyIORef' (error_loading_files state) (\_ -> (Set.empty, ())) + +-- | Clear cradle files and reset to empty set +clearCradleFiles :: SessionState -> IO () +clearCradleFiles state = + atomicModifyIORef' (cradle_files state) (\_ -> (Set.empty, ())) + +-- | Reset the file maps in the session state +resetFileMaps :: SessionState -> STM () +resetFileMaps state = do + STM.reset (filesMap state) + STM.reset (fileToFlags state) + +-- | Insert or update file flags for a specific hieYaml and normalized file path +insertFileFlags :: SessionState -> Maybe FilePath -> NormalizedFilePath -> (IdeResult HscEnvEq, DependencyInfo) -> STM () +insertFileFlags state hieYaml ncfp flags = + STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp flags)) hieYaml (fileToFlags state) + +-- | Insert a file mapping from normalized path to hieYaml location +insertFileMapping :: SessionState -> Maybe FilePath -> NormalizedFilePath -> STM () +insertFileMapping state hieYaml ncfp = + STM.insert hieYaml ncfp (filesMap state) + +-- | Remove a file from the pending file set +removeFromPending :: SessionState -> FilePath -> STM () +removeFromPending state file = + S.delete file (pendingFileSet state) + +-- | Add a file to the pending file set +addToPending :: SessionState -> FilePath -> STM () +addToPending state file = + S.insert file (pendingFileSet state) + +-- | Common pattern: Insert file flags, insert file 
mapping, and remove from pending +completeFileProcessing :: SessionState -> Maybe FilePath -> NormalizedFilePath -> FilePath -> (IdeResult HscEnvEq, DependencyInfo) -> IO () +completeFileProcessing state hieYaml ncfp file flags = do +-- remove cfp from pending files + addErrorLoadingFile state file + removeCradleFile state file + atomically $ do + insertFileFlags state hieYaml ncfp flags + insertFileMapping state hieYaml ncfp + removeFromPending state file + +-- | Insert multiple file mappings at once +insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () +insertAllFileMappings state mappings = + mapM_ (\(yaml, path) -> insertFileMapping state yaml path) mappings + +-- | Increment the version counter +incrementVersion :: SessionState -> IO Int +incrementVersion state = modifyVar' (version state) succ + +-- | Get files from the pending file set +getPendingFiles :: SessionState -> IO (HashSet FilePath) +getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFileSet state) + +-- | Handle errors during session loading by recording file as having error and removing from pending +handleSessionError :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () +handleSessionError state hieYaml file e = do + dep <- getDependencyInfo $ maybe [] pure hieYaml + let ncfp = toNormalizedFilePath' file + let errorResult = (([renderPackageSetupException file e], Nothing), dep) + completeFileProcessing state hieYaml ncfp file errorResult + +-- | Get the set of extra files to load based on the current file path +-- If the current file is in error loading files, we fallback to single loading mode (empty set) +-- Otherwise, we remove error files from pending files and also exclude the current file +getExtraFilesToLoad :: SessionState -> FilePath -> IO [FilePath] +getExtraFilesToLoad state cfp = do + pendingFiles <- getPendingFiles state + errorFiles <- readIORef (error_loading_files state) + old_files <- readIORef (cradle_files state) + -- if the file is in error loading files, we fall back to single loading mode + return $ + Set.toList $ + if cfp `Set.member` errorFiles + then Set.empty + -- remove error files from pending files since error loading need to load one by one + else (Set.delete cfp $ pendingFiles `Set.difference` errorFiles) <> old_files + -- | Given a root directory, return a Shake 'Action' which setups an -- 'IdeGhcSession' given a file. -- Some of the many things this does: @@ -435,23 +554,20 @@ getHieDbLoc dir = do loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions -> FilePath -> TQueue (IO ()) -> IO (Action IdeGhcSession) loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] - cradle_files <- newIORef (Set.fromList []) - error_loading_files <- newIORef (Set.fromList []) - -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file - hscEnvs <- newVar Map.empty :: IO (Var HieMap) - -- Mapping from a Filepath to HscEnv - fileToFlags <- STM.newIO :: IO FlagsMap - -- Mapping from a Filepath to its 'hie.yaml' location. - -- Should hold the same Filepaths as 'fileToFlags', otherwise - -- they are inconsistent. So, everywhere you modify 'fileToFlags', - -- you have to modify 'filesMap' as well. 
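
The invariant spelled out above — filesMap and fileToFlags must stay in sync — is what the atomically blocks in these patches enforce: both maps are updated inside one STM transaction so a reader can never observe one without the other. A rough sketch of that write path, with simplified types (Flags stands in for the real (IdeResult HscEnvEq, DependencyInfo) pair) and illustrative names:

import Control.Concurrent.STM (STM)
import qualified StmContainers.Map as STM

type Yaml  = Maybe FilePath  -- location of the governing hie.yaml, if any
type Flags = ()              -- placeholder for the per-file session result

-- Update both maps in a single transaction; this is simplified and does
-- not deduplicate entries the way the real code's HashMap merge does.
insertConsistently
  :: STM.Map FilePath Yaml               -- filesMap: file -> hie.yaml
  -> STM.Map Yaml [(FilePath, Flags)]    -- fileToFlags: hie.yaml -> files
  -> Yaml -> FilePath -> Flags -> STM ()
insertConsistently filesMap fileToFlags yaml file flags = do
  STM.insert yaml file filesMap
  old <- STM.lookup yaml fileToFlags
  STM.insert ((file, flags) : concat old) yaml fileToFlags

Readers such as checkInCache look up filesMap first and fileToFlags second, relying on exactly this pairing.
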
- filesMap <- STM.newIO :: IO FilesMap - -- Pending files waiting to be loaded - pendingFileSet <- S.newIO :: IO (S.OrderedSet FilePath) - -- Version of the mappings above - version <- newVar 0 + + -- Initialize SessionState + sessionState <- SessionState + <$> newIORef (Set.fromList []) -- cradle_files + <*> newIORef (Set.fromList []) -- error_loading_files + <*> newVar Map.empty -- hscEnvs + <*> STM.newIO -- fileToFlags + <*> STM.newIO -- filesMap + <*> S.newIO -- pendingFileSet + <*> newVar 0 -- version + biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) - let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar version) + let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar (version sessionState)) + -- This caches the mapping from Mod.hs -> hie.yaml cradleLoc <- liftIO $ memoIO $ \v -> do res <- findCradle v @@ -466,7 +582,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do extras@ShakeExtras{restartShakeSession, ideNc, knownTargetsVar, lspEnv } <- getShakeExtras let invalidateShakeCache = do - void $ modifyVar' version succ + void $ incrementVersion sessionState return $ toNoFileKey GhcSessionIO IdeOptions{ optTesting = IdeTesting optTesting @@ -523,7 +639,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- or making a new one. The lookup returns the HscEnv and a list of -- information about other components loaded into the HscEnv -- (unitId, DynFlag, Targets) - modifyVar hscEnvs $ \m -> do + modifyVar (hscEnvs sessionState) $ \m -> do -- Just deps if there's already an HscEnv -- Nothing is it's the first time we are making an HscEnv let oldDeps = Map.lookup hieYaml m @@ -594,12 +710,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ]) Nothing - let insertAll m xs = mapM_ (flip (uncurry STM.insert) m) xs - newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map + let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map atomically $ do - STM.insert this_flags_map hieYaml fileToFlags - insertAll filesMap $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - forM_ newLoaded $ flip S.delete pendingFileSet + STM.insert this_flags_map hieYaml (fileToFlags sessionState) + insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + forM_ newLoaded $ flip S.delete (pendingFileSet sessionState) -- Typecheck all files in the project on startup checkProject <- getCheckProject @@ -635,19 +750,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" - pendingFiles <- Set.insert cfp . 
Set.fromList <$> (atomically $ S.toUnOrderedList pendingFileSet) - errorFiles <- readIORef error_loading_files - old_files <- readIORef cradle_files - -- if the file is in error loading files, we fall back to single loading mode - let extraToLoads = if cfp `Set.member` errorFiles - then Set.empty - -- remove error files from pending files since error loading need to load one by one - else Set.delete cfp $ pendingFiles `Set.difference` errorFiles - + extraToLoads <- getExtraFilesToLoad sessionState cfp eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ Set.delete cfp $ extraToLoads <> old_files) + res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp extraToLoads addTag "result" (show res) return res @@ -663,51 +770,42 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ((runTime, _):_) | compileTime == runTime -> do (_results, allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) + pendingFiles <- getPendingFiles sessionState let newLoaded = pendingFiles `Set.intersection` allNewLoaded -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded -- remove all new loaded file from error loading files - atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) - atomicModifyIORef' cradle_files (\xs -> (newLoaded <> xs,())) + mapM_ (removeErrorLoadingFile sessionState) (Set.toList allNewLoaded) + addCradleFiles sessionState newLoaded restart | otherwise -> do - -- delete cfp from pending files - atomically $ do - STM.focus (Focus.insertOrMerge HM.union - (HM.singleton ncfp (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing), mempty))) - hieYaml fileToFlags - STM.insert hieYaml ncfp filesMap - S.delete cfp pendingFileSet + -- Use the common pattern here: updateFileState + completeFileProcessing sessionState hieYaml ncfp cfp + (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing), mempty) -- Failure case, either a cradle error or the none cradle Left err -> do - let attemptToLoadFiles = (Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err) - `Set.difference` old_files - if (not $ null attemptToLoadFiles) - + -- what if the error to load file is one of old_files ? + let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err + old_files <- readIORef (cradle_files sessionState) + let errorToLoadNewFiles = attemptToLoadFiles `Set.difference` old_files + if not (null errorToLoadNewFiles) then do -- we are loading more files and failed, we need to retry - -- mark as less loaded files as failedLoadingFiles as possible -- limitation is that when we are loading files, and the dependencies of old_files -- are changed, and old_files are not valid anymore. -- but they will still be in the old_files, and will not move to error_loading_files. -- And make other files failed to load in batch mode. 
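
The limitation described in these comments is the reason for the retry logic that follows: when a batch load fails, the extra files are blamed and the originally requested file is retried on its own. A small sketch of that fallback strategy, using illustrative names rather than the HLS API (the load callback stands in for the cradle invocation, and a plain Data.Set replaces the HashSet used in the real code):

import Data.IORef
import qualified Data.Set as Set

data Result = Ok | Failed deriving (Eq, Show)

loadWithFallback
  :: IORef (Set.Set FilePath)      -- files that previously failed in a batch
  -> ([FilePath] -> IO Result)     -- stand-in for the cradle load
  -> FilePath -> [FilePath] -> IO Result
loadWithFallback errRef load file extras = do
  bad <- readIORef errRef
  let batch = filter (`Set.notMember` bad) extras
  r <- load (file : batch)
  case r of
    Ok -> pure Ok
    Failed
      | null batch -> pure Failed   -- single-file load failed: report it
      | otherwise  -> do            -- blame the extras, retry this file alone
          modifyIORef' errRef (Set.union (Set.fromList batch))
          loadWithFallback errRef load file []

As in the real code, files marked this way are afterwards loaded one at a time, until their dependencies change and the marks are cleared.
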
- let failedLoadingFiles = (Set.insert cfp attemptToLoadFiles) - atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) + addErrorLoadingFiles sessionState (Set.toList errorToLoadNewFiles) -- retry without other files logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) consultCradle hieYaml cfp else do -- we are only loading this file and it failed - dep_info <- getDependencyInfo ((maybeToList hieYaml) ++ concatMap cradleErrorDependencies err) + dep_info <- getDependencyInfo (maybeToList hieYaml ++ concatMap cradleErrorDependencies err) let ncfp = toNormalizedFilePath' cfp let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - -- remove cfp from pending files - atomicModifyIORef' error_loading_files (\xs -> (Set.insert cfp xs,())) - atomically $ do - STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp (res, dep_info))) hieYaml fileToFlags - STM.insert hieYaml ncfp filesMap - S.delete cfp pendingFileSet + completeFileProcessing sessionState hieYaml ncfp cfp (res, dep_info) let -- | We allow users to specify a loading strategy. @@ -736,40 +834,36 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do logWith recorder Info LogSessionLoadingChanged -- If the dependencies are out of date then clear both caches and start -- again. - atomically $ do - STM.reset filesMap - STM.reset fileToFlags + atomically $ resetFileMaps sessionState -- Don't even keep the name cache, we start from scratch here! - modifyVar_ hscEnvs (const (return Map.empty)) + modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) -- cleanup error loading files and cradle files - atomicModifyIORef' error_loading_files (\_ -> (Set.empty,())) - atomicModifyIORef' cradle_files (\_ -> (Set.empty,())) + clearErrorLoadingFiles sessionState + clearCradleFiles sessionState - v <- atomically $ fromMaybe HM.empty <$> STM.lookup hieYaml fileToFlags - case HM.lookup (toNormalizedFilePath' file) v of + v <- atomically $ STM.lookup hieYaml (fileToFlags sessionState) + case v >>= HM.lookup (toNormalizedFilePath' file) of Just (_opts, old_di) -> do deps_ok <- checkDependencyInfo old_di - if (not deps_ok) + if not deps_ok then do -- if deps are old, we can try to load the error files again - atomicModifyIORef' error_loading_files (\xs -> (Set.delete file xs,())) - atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) + removeErrorLoadingFile sessionState file + removeCradleFile sessionState file -- If the dependencies are out of date then clear both caches and start -- again. - atomically $ do - STM.reset filesMap - STM.reset fileToFlags + atomically $ resetFileMaps sessionState -- Keep the same name cache - modifyVar_ hscEnvs (return . Map.adjust (const []) hieYaml ) + modifyVar_ (hscEnvs sessionState) (return . 
Map.adjust (const []) hieYaml) consultCradle hieYaml file -- if deps are ok, we can just remove the file from pending files - else atomically $ S.delete file pendingFileSet + else atomically $ removeFromPending sessionState file Nothing -> consultCradle hieYaml file let checkInCache ::NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) checkInCache ncfp = runMaybeT $ do - cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp filesMap - m <- MaybeT $ STM.lookup cachedHieYamlLocation fileToFlags + cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp (filesMap sessionState) + m <- MaybeT $ STM.lookup cachedHieYamlLocation (fileToFlags sessionState) MaybeT $ pure $ HM.lookup ncfp m -- The main function which gets options for a file. We only want one of these running @@ -779,22 +873,15 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let getOptions :: FilePath -> IO () getOptions file = do let ncfp = toNormalizedFilePath' file - cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap + cachedHieYamlLocation <- atomically $ STM.lookup ncfp (filesMap sessionState) hieYaml <- cradleLoc file let hieLoc = join cachedHieYamlLocation <|> hieYaml - sessionOpts (hieLoc, file) `Safe.catch` \e -> do - dep <- getDependencyInfo $ maybe [] pure hieYaml - let errorResult = (([renderPackageSetupException file e], Nothing), dep) - atomically $ do - STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp errorResult)) hieLoc fileToFlags - STM.insert hieYaml ncfp filesMap - -- delete file from pending files - S.delete file pendingFileSet + sessionOpts (hieLoc, file) `Safe.catch` handleSessionError sessionState hieLoc file let getOptionsLoop :: IO () getOptionsLoop = do -- Get the next file to load - absFile <- atomically $ S.readQueue pendingFileSet + absFile <- atomically $ S.readQueue (pendingFileSet sessionState) logWith recorder Debug (LogGetOptionsLoop absFile) getOptions absFile getOptionsLoop @@ -808,7 +895,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let ncfp = toNormalizedFilePath' absFile res <- atomically $ do -- wait until target file is not in pendingFiles - Extra.whenM (S.lookup absFile pendingFileSet) STM.retry + Extra.whenM (S.lookup absFile (pendingFileSet sessionState)) STM.retry -- check if in the cache checkInCache ncfp logWith recorder Debug $ LogLookupSessionCache absFile @@ -823,7 +910,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Just r -> return r Nothing -> do -- if not ok, we need to reload the session - atomically $ S.insert absFile pendingFileSet + atomically $ addToPending sessionState absFile lookupOrWaitCache absFile -- see Note [Serializing runs in separate thread] From 80d016094abba178ecff317b186bfe3c52517c43 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sun, 27 Apr 2025 22:45:14 +0800 Subject: [PATCH 40/87] update hiebois --- cabal.project | 9 +----- ghcide/ghcide.cabal | 2 +- .../session-loader/Development/IDE/Session.hs | 32 +++++++++++-------- 3 files changed, 21 insertions(+), 22 deletions(-) diff --git a/cabal.project b/cabal.project index e54c7f4bfe..59f565677b 100644 --- a/cabal.project +++ b/cabal.project @@ -8,7 +8,7 @@ packages: ./hls-test-utils -index-state: 2025-04-19T07:34:07Z +index-state: 2025-04-26T07:34:07Z tests: True test-show-details: direct @@ -53,13 +53,6 @@ allow-newer: cabal-install-parsers:Cabal-syntax, -source-repository-package - type: git - location: https://github.com/haskell/hie-bios - tag: e372a62b780b1314a35238a698a9e3813096b122 
--- see https://github.com/haskell/haskell-language-server/pull/4445#issuecomment-2705787655 -allow-older: hie-bios:optparse-applicative - if impl(ghc >= 9.11) benchmarks: False allow-newer: diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 0ae7b15ce9..eed0ed5919 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -73,7 +73,7 @@ library , Glob , haddock-library >=1.8 && <1.12 , hashable - , hie-bios ^>=0.14.0 + , hie-bios ^>=0.15.0 , hie-compat ^>=0.3.0.0 , hiedb ^>= 0.6.0.2 , hls-graph == 2.10.0.0 diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 76d10c9d66..a153e15119 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -500,6 +500,23 @@ completeFileProcessing state hieYaml ncfp file flags = do insertFileMapping state hieYaml ncfp removeFromPending state file +-- | Handle successful loading by updating session state with the new file maps +updateSessionOnSuccess :: Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> [TargetDetails] -> IO () +updateSessionOnSuccess recorder state hieYaml this_flags_map all_targets = do + let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map + atomically $ do + STM.insert this_flags_map hieYaml (fileToFlags state) + insertAllFileMappings state $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + forM_ newLoaded $ flip S.delete (pendingFileSet state) + pendingFiles <- getPendingFiles state + let newLoaded = pendingFiles `Set.intersection` newLoaded + -- log new loaded files + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + -- remove all new loaded file from error loading files + mapM_ (removeErrorLoadingFile state) (Set.toList newLoaded) + addCradleFiles state newLoaded + return () + -- | Insert multiple file mappings at once insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () insertAllFileMappings state mappings = @@ -711,11 +728,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Nothing let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map - atomically $ do - STM.insert this_flags_map hieYaml (fileToFlags sessionState) - insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - forM_ newLoaded $ flip S.delete (pendingFileSet sessionState) - + updateSessionOnSuccess recorder sessionState hieYaml this_flags_map all_targets -- Typecheck all files in the project on startup checkProject <- getCheckProject -- The VFS doesn't change on cradle edits, re-use the old one. 
@@ -769,14 +782,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - (_results, allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) - pendingFiles <- getPendingFiles sessionState - let newLoaded = pendingFiles `Set.intersection` allNewLoaded - -- log new loaded files - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - -- remove all new loaded file from error loading files - mapM_ (removeErrorLoadingFile sessionState) (Set.toList allNewLoaded) - addCradleFiles sessionState newLoaded + (_results, _allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) restart | otherwise -> do -- Use the common pattern here: updateFileState From 24269f6e085d1edda2c2d5c3131c3e4282ab760e Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 28 Apr 2025 18:00:58 +0800 Subject: [PATCH 41/87] revert --- .../session-loader/Development/IDE/Session.hs | 32 ++++++++----------- 1 file changed, 13 insertions(+), 19 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index a153e15119..76d10c9d66 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -500,23 +500,6 @@ completeFileProcessing state hieYaml ncfp file flags = do insertFileMapping state hieYaml ncfp removeFromPending state file --- | Handle successful loading by updating session state with the new file maps -updateSessionOnSuccess :: Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> [TargetDetails] -> IO () -updateSessionOnSuccess recorder state hieYaml this_flags_map all_targets = do - let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map - atomically $ do - STM.insert this_flags_map hieYaml (fileToFlags state) - insertAllFileMappings state $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - forM_ newLoaded $ flip S.delete (pendingFileSet state) - pendingFiles <- getPendingFiles state - let newLoaded = pendingFiles `Set.intersection` newLoaded - -- log new loaded files - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - -- remove all new loaded file from error loading files - mapM_ (removeErrorLoadingFile state) (Set.toList newLoaded) - addCradleFiles state newLoaded - return () - -- | Insert multiple file mappings at once insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () insertAllFileMappings state mappings = @@ -728,7 +711,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Nothing let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map - updateSessionOnSuccess recorder sessionState hieYaml this_flags_map all_targets + atomically $ do + STM.insert this_flags_map hieYaml (fileToFlags sessionState) + insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + forM_ newLoaded $ flip S.delete (pendingFileSet sessionState) + -- Typecheck all files in the project on startup checkProject <- getCheckProject -- The VFS doesn't change on cradle edits, re-use the old one. 
@@ -782,7 +769,14 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - (_results, _allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) + (_results, allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) + pendingFiles <- getPendingFiles sessionState + let newLoaded = pendingFiles `Set.intersection` allNewLoaded + -- log new loaded files + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + -- remove all new loaded file from error loading files + mapM_ (removeErrorLoadingFile sessionState) (Set.toList allNewLoaded) + addCradleFiles sessionState newLoaded restart | otherwise -> do -- Use the common pattern here: updateFileState From 98999f55b674cfcd5cd649f180f9f45366b8e5dc Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 28 Apr 2025 19:20:10 +0800 Subject: [PATCH 42/87] restart the shake if cabal file changed --- .../session-loader/Development/IDE/Session.hs | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 76d10c9d66..96c1016399 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -840,6 +840,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- cleanup error loading files and cradle files clearErrorLoadingFiles sessionState clearCradleFiles sessionState + cacheKey <- invalidateShakeCache + restartShakeSession VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) v <- atomically $ STM.lookup hieYaml (fileToFlags sessionState) case v >>= HM.lookup (toNormalizedFilePath' file) of @@ -870,20 +872,17 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. 
- let getOptions :: FilePath -> IO () - getOptions file = do - let ncfp = toNormalizedFilePath' file - cachedHieYamlLocation <- atomically $ STM.lookup ncfp (filesMap sessionState) - hieYaml <- cradleLoc file - let hieLoc = join cachedHieYamlLocation <|> hieYaml - sessionOpts (hieLoc, file) `Safe.catch` handleSessionError sessionState hieLoc file let getOptionsLoop :: IO () getOptionsLoop = do -- Get the next file to load - absFile <- atomically $ S.readQueue (pendingFileSet sessionState) - logWith recorder Debug (LogGetOptionsLoop absFile) - getOptions absFile + file <- atomically $ S.readQueue (pendingFileSet sessionState) + logWith recorder Debug (LogGetOptionsLoop file) + let ncfp = toNormalizedFilePath' file + cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) + hieYaml <- cradleLoc file + let hieLoc = cachedHieYamlLocation <|> hieYaml + sessionOpts (hieLoc, file) `Safe.catch` handleSessionError sessionState hieLoc file getOptionsLoop -- | Given a file, this function will return the HscEnv and the dependencies From 21dd23314329a989dd5069817c51549a3f761888 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 28 Apr 2025 23:30:05 +0800 Subject: [PATCH 43/87] better error handling in session loader --- .../session-loader/Development/IDE/Session.hs | 65 +++++++++---------- 1 file changed, 29 insertions(+), 36 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 96c1016399..738af0944a 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -489,16 +489,6 @@ addToPending :: SessionState -> FilePath -> STM () addToPending state file = S.insert file (pendingFileSet state) --- | Common pattern: Insert file flags, insert file mapping, and remove from pending -completeFileProcessing :: SessionState -> Maybe FilePath -> NormalizedFilePath -> FilePath -> (IdeResult HscEnvEq, DependencyInfo) -> IO () -completeFileProcessing state hieYaml ncfp file flags = do --- remove cfp from pending files - addErrorLoadingFile state file - removeCradleFile state file - atomically $ do - insertFileFlags state hieYaml ncfp flags - insertFileMapping state hieYaml ncfp - removeFromPending state file -- | Insert multiple file mappings at once insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () @@ -516,10 +506,20 @@ getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pending -- | Handle errors during session loading by recording file as having error and removing from pending handleSessionError :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () handleSessionError state hieYaml file e = do - dep <- getDependencyInfo $ maybe [] pure hieYaml + handleFileProcessingError state hieYaml file [renderPackageSetupException file e] mempty + +-- | Common pattern: Insert file flags, insert file mapping, and remove from pending +handleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> IO () +handleFileProcessingError state hieYaml file diags extraDepFiles = do + addErrorLoadingFile state file + removeCradleFile state file + dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles let ncfp = toNormalizedFilePath' file - let errorResult = (([renderPackageSetupException file e], Nothing), dep) - completeFileProcessing state hieYaml ncfp file errorResult + let flags = ((diags, Nothing), dep) + atomically $ do + 
insertFileFlags state hieYaml ncfp flags + insertFileMapping state hieYaml ncfp + removeFromPending state file -- | Get the set of extra files to load based on the current file path -- If the current file is in error loading files, we fallback to single loading mode (empty set) @@ -679,8 +679,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) - let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) - -> IO ((IdeResult HscEnvEq,DependencyInfo), HashSet FilePath, IO ()) + let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) -> IO () session args@(hieYaml, _cfp, _opts, _libDir) = do (new_deps, old_deps) <- packageSetup args @@ -695,7 +694,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') all_targets' = concat all_target_details this_dep_info <- getDependencyInfo $ maybeToList hieYaml - let (all_targets, this_flags_map, this_options) + let (all_targets, this_flags_map, _this_options) = case HM.lookup _cfp flags_map' of Just this -> (all_targets', flags_map', this) Nothing -> (this_target_details : all_targets', HM.insert _cfp this_flags flags_map', this_flags) @@ -710,17 +709,24 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ]) Nothing - let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map + pendingFiles <- getPendingFiles sessionState + -- this_flags_map might contains files not in pendingFiles, take the intersection + let newLoaded = pendingFiles `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) atomically $ do STM.insert this_flags_map hieYaml (fileToFlags sessionState) insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets forM_ newLoaded $ flip S.delete (pendingFileSet sessionState) + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + -- remove all new loaded file from error loading files + mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded) + addCradleFiles sessionState newLoaded -- Typecheck all files in the project on startup checkProject <- getCheckProject + -- The VFS doesn't change on cradle edits, re-use the old one. -- Invalidate all the existing GhcSession build nodes by restarting the Shake session - let restart = restartShakeSession VFSUnmodified "new component" [] $ do + restartShakeSession VFSUnmodified "new component" [] $ do keys2 <- invalidateShakeCache keys1 <- extendKnownTargets all_targets unless (null new_deps || not checkProject) $ do @@ -734,7 +740,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] - return (this_options, newLoaded, restart) let consultCradle :: Maybe FilePath -> FilePath -> IO () consultCradle hieYaml cfp = do @@ -759,29 +764,19 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do return res logWith recorder Debug $ LogSessionLoadingResult eopts + let ncfp = toNormalizedFilePath' cfp case eopts of -- The cradle gave us some options so get to work turning them -- into and HscEnv. 
Right (opts, libDir, version) -> do - let ncfp = toNormalizedFilePath' cfp let compileTime = fullCompilerVersion case reverse $ readP_to_S parseVersion version of [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) - | compileTime == runTime -> do - (_results, allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) - pendingFiles <- getPendingFiles sessionState - let newLoaded = pendingFiles `Set.intersection` allNewLoaded - -- log new loaded files - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - -- remove all new loaded file from error loading files - mapM_ (removeErrorLoadingFile sessionState) (Set.toList allNewLoaded) - addCradleFiles sessionState newLoaded - restart + | compileTime == runTime -> session (hieYaml, ncfp, opts, libDir) | otherwise -> do -- Use the common pattern here: updateFileState - completeFileProcessing sessionState hieYaml ncfp cfp - (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing), mempty) + handleFileProcessingError sessionState hieYaml cfp [renderPackageSetupException cfp GhcVersionMismatch{..}] mempty -- Failure case, either a cradle error or the none cradle Left err -> do -- what if the error to load file is one of old_files ? @@ -802,10 +797,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do consultCradle hieYaml cfp else do -- we are only loading this file and it failed - dep_info <- getDependencyInfo (maybeToList hieYaml ++ concatMap cradleErrorDependencies err) - let ncfp = toNormalizedFilePath' cfp - let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - completeFileProcessing sessionState hieYaml ncfp cfp (res, dep_info) + let res = map (\err' -> renderCradleError err' cradle ncfp) err + handleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err let -- | We allow users to specify a loading strategy. From f140a2afc3a19f1e56babc1dd6e9a4a5b627ea7a Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 28 Apr 2025 23:37:10 +0800 Subject: [PATCH 44/87] refactor error handling in loadSessionWithOptions to improve clarity and logic --- ghcide/session-loader/Development/IDE/Session.hs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 738af0944a..caa8bec577 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -782,8 +782,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- what if the error to load file is one of old_files ? let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err old_files <- readIORef (cradle_files sessionState) - let errorToLoadNewFiles = attemptToLoadFiles `Set.difference` old_files - if not (null errorToLoadNewFiles) + let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) + if length errorToLoadNewFiles > 1 then do -- we are loading more files and failed, we need to retry -- mark as less loaded files as failedLoadingFiles as possible @@ -791,7 +791,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- are changed, and old_files are not valid anymore. -- but they will still be in the old_files, and will not move to error_loading_files. -- And make other files failed to load in batch mode. 
- addErrorLoadingFiles sessionState (Set.toList errorToLoadNewFiles) + addErrorLoadingFiles sessionState errorToLoadNewFiles -- retry without other files logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) consultCradle hieYaml cfp From e339c1d3e4869aebd104cc9b0dadb80aba03ae13 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 28 Apr 2025 23:54:37 +0800 Subject: [PATCH 45/87] refactor SessionState management for improved batch loading logic --- .../session-loader/Development/IDE/Session.hs | 93 ++++++++++++------- 1 file changed, 58 insertions(+), 35 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index caa8bec577..3f20e93fc1 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -418,14 +418,33 @@ getHieDbLoc dir = do createDirectoryIfMissing True cDir pure (cDir db) +{- Note [SessionState and batch load] +SessionState manages the state for batch loading files in the session loader. + +- When a new file needs to be loaded, it is added to the pendingFiles set. +- The loader processes files from pendingFiles, attempting to load them in batches. +- If a file is already in failedFiles, it is loaded individually (single-file mode). +- Otherwise, the loader tries to load as many files as possible together (batch mode). + +On success: + - All successfully loaded files are removed from pendingFiles and failedFiles, + and added to loadedFiles. + +On failure: + - If loading a single file fails, it is added to failedFiles and removed from loadedFiles and pendingFiles. + - If batch loading fails, all files attempted are added to failedFiles. + +This approach ensures efficient batch loading while isolating problematic files for individual handling. +-} + data SessionState = SessionState - { cradle_files :: !(IORef (HashSet FilePath)) - , error_loading_files :: !(IORef (HashSet FilePath)) - , hscEnvs :: !(Var HieMap) - , fileToFlags :: !(STM.Map (Maybe FilePath) (HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo))) - , filesMap :: !(STM.Map NormalizedFilePath (Maybe FilePath)) - , pendingFileSet :: !(S.OrderedSet FilePath) - , version :: !(Var Int) + { loadedFiles :: !(IORef (HashSet FilePath)), + failedFiles :: !(IORef (HashSet FilePath)), + pendingFiles :: !(S.OrderedSet FilePath), + hscEnvs :: !(Var HieMap), + fileToFlags :: !(STM.Map (Maybe FilePath) (HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo))), + filesMap :: !(STM.Map NormalizedFilePath (Maybe FilePath)), + version :: !(Var Int) } -- | Helper functions for SessionState management @@ -434,7 +453,7 @@ data SessionState = SessionState -- | Add a file to the set of files with errors during loading addErrorLoadingFile :: SessionState -> FilePath -> IO () addErrorLoadingFile state file = - atomicModifyIORef' (error_loading_files state) (\xs -> (Set.insert file xs, ())) + atomicModifyIORef' (failedFiles state) (\xs -> (Set.insert file xs, ())) addErrorLoadingFiles :: SessionState -> [FilePath] -> IO () addErrorLoadingFiles = mapM_ . addErrorLoadingFile @@ -442,26 +461,26 @@ addErrorLoadingFiles = mapM_ . 
addErrorLoadingFile -- | Remove a file from the set of files with errors during loading removeErrorLoadingFile :: SessionState -> FilePath -> IO () removeErrorLoadingFile state file = - atomicModifyIORef' (error_loading_files state) (\xs -> (Set.delete file xs, ())) + atomicModifyIORef' (failedFiles state) (\xs -> (Set.delete file xs, ())) addCradleFiles :: SessionState -> HashSet FilePath -> IO () addCradleFiles state files = - atomicModifyIORef' (cradle_files state) (\xs -> (files <> xs, ())) + atomicModifyIORef' (loadedFiles state) (\xs -> (files <> xs, ())) -- | Remove a file from the cradle files set removeCradleFile :: SessionState -> FilePath -> IO () removeCradleFile state file = - atomicModifyIORef' (cradle_files state) (\xs -> (Set.delete file xs, ())) + atomicModifyIORef' (loadedFiles state) (\xs -> (Set.delete file xs, ())) -- | Clear error loading files and reset to empty set clearErrorLoadingFiles :: SessionState -> IO () clearErrorLoadingFiles state = - atomicModifyIORef' (error_loading_files state) (\_ -> (Set.empty, ())) + atomicModifyIORef' (failedFiles state) (\_ -> (Set.empty, ())) -- | Clear cradle files and reset to empty set clearCradleFiles :: SessionState -> IO () clearCradleFiles state = - atomicModifyIORef' (cradle_files state) (\_ -> (Set.empty, ())) + atomicModifyIORef' (loadedFiles state) (\_ -> (Set.empty, ())) -- | Reset the file maps in the session state resetFileMaps :: SessionState -> STM () @@ -482,12 +501,12 @@ insertFileMapping state hieYaml ncfp = -- | Remove a file from the pending file set removeFromPending :: SessionState -> FilePath -> STM () removeFromPending state file = - S.delete file (pendingFileSet state) + S.delete file (pendingFiles state) -- | Add a file to the pending file set addToPending :: SessionState -> FilePath -> STM () addToPending state file = - S.insert file (pendingFileSet state) + S.insert file (pendingFiles state) -- | Insert multiple file mappings at once @@ -501,7 +520,7 @@ incrementVersion state = modifyVar' (version state) succ -- | Get files from the pending file set getPendingFiles :: SessionState -> IO (HashSet FilePath) -getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFileSet state) +getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFiles state) -- | Handle errors during session loading by recording file as having error and removing from pending handleSessionError :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () @@ -527,8 +546,8 @@ handleFileProcessingError state hieYaml file diags extraDepFiles = do getExtraFilesToLoad :: SessionState -> FilePath -> IO [FilePath] getExtraFilesToLoad state cfp = do pendingFiles <- getPendingFiles state - errorFiles <- readIORef (error_loading_files state) - old_files <- readIORef (cradle_files state) + errorFiles <- readIORef (failedFiles state) + old_files <- readIORef (loadedFiles state) -- if the file is in error loading files, we fall back to single loading mode return $ Set.toList $ @@ -537,6 +556,19 @@ getExtraFilesToLoad state cfp = do -- remove error files from pending files since error loading need to load one by one else (Set.delete cfp $ pendingFiles `Set.difference` errorFiles) <> old_files +newSessionState :: IO SessionState +newSessionState = do + -- Initialize SessionState + sessionState <- SessionState + <$> newIORef (Set.fromList []) -- loadedFiles + <*> newIORef (Set.fromList []) -- failedFiles + <*> S.newIO -- pendingFiles + <*> newVar Map.empty -- hscEnvs + <*> STM.newIO 
-- fileToFlags + <*> STM.newIO -- filesMap + <*> newVar 0 -- version + return sessionState + -- | Given a root directory, return a Shake 'Action' which setups an -- 'IdeGhcSession' given a file. -- Some of the many things this does: @@ -555,16 +587,7 @@ loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions - loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] - -- Initialize SessionState - sessionState <- SessionState - <$> newIORef (Set.fromList []) -- cradle_files - <*> newIORef (Set.fromList []) -- error_loading_files - <*> newVar Map.empty -- hscEnvs - <*> STM.newIO -- fileToFlags - <*> STM.newIO -- filesMap - <*> S.newIO -- pendingFileSet - <*> newVar 0 -- version - + sessionState <- newSessionState biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar (version sessionState)) @@ -709,13 +732,13 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ]) Nothing - pendingFiles <- getPendingFiles sessionState + pendings <- getPendingFiles sessionState -- this_flags_map might contains files not in pendingFiles, take the intersection - let newLoaded = pendingFiles `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) + let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) atomically $ do STM.insert this_flags_map hieYaml (fileToFlags sessionState) insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - forM_ newLoaded $ flip S.delete (pendingFileSet sessionState) + forM_ newLoaded $ flip S.delete (pendingFiles sessionState) logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded -- remove all new loaded file from error loading files @@ -781,7 +804,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Left err -> do -- what if the error to load file is one of old_files ? let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err - old_files <- readIORef (cradle_files sessionState) + old_files <- readIORef (loadedFiles sessionState) let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) if length errorToLoadNewFiles > 1 then do @@ -789,7 +812,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- mark as less loaded files as failedLoadingFiles as possible -- limitation is that when we are loading files, and the dependencies of old_files -- are changed, and old_files are not valid anymore. - -- but they will still be in the old_files, and will not move to error_loading_files. + -- but they will still be in the old_files, and will not move to failedFiles. -- And make other files failed to load in batch mode. 
addErrorLoadingFiles sessionState errorToLoadNewFiles -- retry without other files @@ -869,7 +892,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let getOptionsLoop :: IO () getOptionsLoop = do -- Get the next file to load - file <- atomically $ S.readQueue (pendingFileSet sessionState) + file <- atomically $ S.readQueue (pendingFiles sessionState) logWith recorder Debug (LogGetOptionsLoop file) let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) @@ -887,7 +910,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let ncfp = toNormalizedFilePath' absFile res <- atomically $ do -- wait until target file is not in pendingFiles - Extra.whenM (S.lookup absFile (pendingFileSet sessionState)) STM.retry + Extra.whenM (S.lookup absFile (pendingFiles sessionState)) STM.retry -- check if in the cache checkInCache ncfp logWith recorder Debug $ LogLookupSessionCache absFile From 1425289cc8fa2ece6b6382e2fb56f76a9eb692d7 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Tue, 29 Apr 2025 00:13:18 +0800 Subject: [PATCH 46/87] refactor session loading error handling for improved clarity and separation of concerns --- .../session-loader/Development/IDE/Session.hs | 39 ++++++++++++------- 1 file changed, 25 insertions(+), 14 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 3f20e93fc1..7f10528e86 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -122,7 +122,6 @@ import Control.Concurrent.STM (STM) import qualified Control.Monad.STM as STM import qualified Development.IDE.Session.OrderedSet as S import qualified Focus -import GHC.Data.Bag import GHC.Driver.Env (hsc_all_home_unit_ids) import GHC.Driver.Errors.Types import GHC.Types.Error (errMsgDiagnostic, @@ -423,20 +422,37 @@ SessionState manages the state for batch loading files in the session loader. - When a new file needs to be loaded, it is added to the pendingFiles set. - The loader processes files from pendingFiles, attempting to load them in batches. -- If a file is already in failedFiles, it is loaded individually (single-file mode). -- Otherwise, the loader tries to load as many files as possible together (batch mode). +- (SBL1) If a file is already in failedFiles, it is loaded individually (single-file mode). +- (SBL2) Otherwise, the loader tries to load as many files as possible together (batch mode). On success: - - All successfully loaded files are removed from pendingFiles and failedFiles, + - (SBL3) All successfully loaded files are removed from pendingFiles and failedFiles, and added to loadedFiles. On failure: - - If loading a single file fails, it is added to failedFiles and removed from loadedFiles and pendingFiles. - - If batch loading fails, all files attempted are added to failedFiles. + - (SBL4) If loading a single file fails, it is added to failedFiles and removed from loadedFiles and pendingFiles. + - (SBL5) If batch loading fails, all files attempted are added to failedFiles. This approach ensures efficient batch loading while isolating problematic files for individual handling. 
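For illustration (a hypothetical walk-through; A.hs and B.hs are made-up file names, not
part of any real project):
- A.hs is requested while B.hs is still pending and neither has failed before, so both
  are handed to the cradle in one batch (SBL2).
- If the batch succeeds, both leave pendingFiles (and failedFiles, if they were there)
  and are recorded in loadedFiles (SBL3).
- If the batch fails, both are recorded in failedFiles (SBL5); the file that triggered
  the load is retried on its own while the other stays pending, so each is subsequently
  loaded in single-file mode (SBL1) and one broken file cannot block the rest.
- If such a single-file load fails as well, that file stays in failedFiles and is
  dropped from pendingFiles and loadedFiles (SBL4).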
-} +handleLoadingSucc :: SessionState -> HashSet FilePath -> IO () +handleLoadingSucc sessionState files = do + atomically $ forM_ (Set.toList files) $ flip S.delete (pendingFiles sessionState) + mapM_ (removeErrorLoadingFile sessionState) (Set.toList files) + addCradleFiles sessionState files + +handleLoadingFailureBatch :: SessionState -> [FilePath] -> IO () +handleLoadingFailureBatch sessionState files = do + addErrorLoadingFiles sessionState files + +handleLoadingFailureSingle :: SessionState -> FilePath -> IO () +handleLoadingFailureSingle sessionState file = do + addErrorLoadingFile sessionState file + removeErrorLoadingFile sessionState file + atomically $ S.delete file (pendingFiles sessionState) + removeCradleFile sessionState file + data SessionState = SessionState { loadedFiles :: !(IORef (HashSet FilePath)), failedFiles :: !(IORef (HashSet FilePath)), @@ -530,15 +546,13 @@ handleSessionError state hieYaml file e = do -- | Common pattern: Insert file flags, insert file mapping, and remove from pending handleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> IO () handleFileProcessingError state hieYaml file diags extraDepFiles = do - addErrorLoadingFile state file - removeCradleFile state file dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles let ncfp = toNormalizedFilePath' file let flags = ((diags, Nothing), dep) + handleLoadingFailureSingle state file atomically $ do insertFileFlags state hieYaml ncfp flags insertFileMapping state hieYaml ncfp - removeFromPending state file -- | Get the set of extra files to load based on the current file path -- If the current file is in error loading files, we fallback to single loading mode (empty set) @@ -738,12 +752,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do atomically $ do STM.insert this_flags_map hieYaml (fileToFlags sessionState) insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - forM_ newLoaded $ flip S.delete (pendingFiles sessionState) logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - -- remove all new loaded file from error loading files - mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded) - addCradleFiles sessionState newLoaded + handleLoadingSucc sessionState newLoaded -- Typecheck all files in the project on startup checkProject <- getCheckProject @@ -814,7 +825,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- are changed, and old_files are not valid anymore. -- but they will still be in the old_files, and will not move to failedFiles. -- And make other files failed to load in batch mode. 
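-- (Illustrative sketch of this limitation, with a hypothetical file name: suppose A.hs
-- was loaded earlier and so sits in loadedFiles, but its cradle dependencies have since
-- changed. A.hs is still included in batch loads, so the batch can keep failing because
-- of it, yet only the genuinely new files end up in failedFiles, because
-- errorToLoadNewFiles excludes old_files.)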
- addErrorLoadingFiles sessionState errorToLoadNewFiles + handleLoadingFailureBatch sessionState errorToLoadNewFiles -- retry without other files logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) consultCradle hieYaml cfp From de6eb9cefebdf27a5487d6251c79b8132a449c79 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Thu, 8 May 2025 16:39:37 +0800 Subject: [PATCH 47/87] cleanup --- ghcide/session-loader/Development/IDE/Session.hs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 7f10528e86..483487c552 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -454,13 +454,13 @@ handleLoadingFailureSingle sessionState file = do removeCradleFile sessionState file data SessionState = SessionState - { loadedFiles :: !(IORef (HashSet FilePath)), - failedFiles :: !(IORef (HashSet FilePath)), + { loadedFiles :: !(IORef (HashSet FilePath)), + failedFiles :: !(IORef (HashSet FilePath)), pendingFiles :: !(S.OrderedSet FilePath), - hscEnvs :: !(Var HieMap), - fileToFlags :: !(STM.Map (Maybe FilePath) (HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo))), - filesMap :: !(STM.Map NormalizedFilePath (Maybe FilePath)), - version :: !(Var Int) + hscEnvs :: !(Var HieMap), + fileToFlags :: !FlagsMap, + filesMap :: !FilesMap, + version :: !(Var Int) } -- | Helper functions for SessionState management From c9926d43536cf576521d19dd86d600427e2e2aba Mon Sep 17 00:00:00 2001 From: soulomoon Date: Thu, 8 May 2025 16:56:00 +0800 Subject: [PATCH 48/87] fix --- .../session-loader/Development/IDE/Session.hs | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 7b574a492f..597d7cffaf 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -893,13 +893,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do checkInCache ncfp = runMaybeT $ do cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp (filesMap sessionState) m <- MaybeT $ STM.lookup cachedHieYamlLocation (fileToFlags sessionState) - -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action - -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. - -- The GlobPattern of a FileSystemWatcher can be absolute or relative. - -- We use the absolute one because it is supported by more LSP clients. - -- Here we make sure deps are absolute and later we use those absolute deps as GlobPattern. - let absolutePathsCradleDeps (eq, deps) = (eq, fmap toAbsolutePath deps) - MaybeT $ pure $ absolutePathsCradleDeps <$> HM.lookup ncfp m + MaybeT $ pure $ HM.lookup ncfp m -- The main function which gets options for a file. We only want one of these running -- at a time. 
Therefore the IORef contains the currently running cradle, if we try @@ -947,9 +941,16 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- see Note [Serializing runs in separate thread] -- Start the getOptionsLoop if the queue is empty liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop + + -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action + -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. + -- The GlobPattern of a FileSystemWatcher can be absolute or relative. + -- We use the absolute one because it is supported by more LSP clients. + -- Here we make sure deps are absolute and later we use those absolute deps as GlobPattern. + let absolutePathsCradleDeps (eq, deps) = (eq, fmap toAbsolutePath $ Map.keys deps) returnWithVersion $ \file -> do let absFile = toAbsolutePath file - second Map.keys <$> lookupOrWaitCache absFile + absolutePathsCradleDeps <$> lookupOrWaitCache absFile -- | Run the specific cradle on a specific FilePath via hie-bios. -- This then builds dependencies or whatever based on the cradle, gets the From 48a46d1d084eb295383ebc040a8d68c46556edd9 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Thu, 8 May 2025 17:27:50 +0800 Subject: [PATCH 49/87] add sessionLoadingPreferenceConfig var to SessionState --- .../session-loader/Development/IDE/Session.hs | 47 ++++++++++--------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 597d7cffaf..87edfc0513 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -78,7 +78,8 @@ import Ide.Logger (Pretty (pretty), nest, toCologActionWithPrio, vcat, viaShow, (<+>)) -import Ide.Types (SessionLoadingPreferenceConfig (..), +import Ide.Types (Config, + SessionLoadingPreferenceConfig (..), sessionLoading) import Language.LSP.Protocol.Message import Language.LSP.Server @@ -460,7 +461,8 @@ data SessionState = SessionState hscEnvs :: !(Var HieMap), fileToFlags :: !FlagsMap, filesMap :: !FilesMap, - version :: !(Var Int) + version :: !(Var Int), + sessionLoadingPreferenceConfig :: !(Var (Maybe SessionLoadingPreferenceConfig)) } -- | Helper functions for SessionState management @@ -570,6 +572,24 @@ getExtraFilesToLoad state cfp = do -- remove error files from pending files since error loading need to load one by one else (Set.delete cfp $ pendingFiles `Set.difference` errorFiles) <> old_files +-- | We allow users to specify a loading strategy. +-- Check whether this config was changed since the last time we have loaded +-- a session. +-- +-- If the loading configuration changed, we likely should restart the session +-- in its entirety. 
+didSessionLoadingPreferenceConfigChange :: SessionState -> Config -> IO Bool +didSessionLoadingPreferenceConfigChange s clientConfig = do + let biosSessionLoadingVar = sessionLoadingPreferenceConfig s + mLoadingConfig <- readVar biosSessionLoadingVar + case mLoadingConfig of + Nothing -> do + writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + pure False + Just loadingConfig -> do + writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + pure (loadingConfig /= sessionLoading clientConfig) + newSessionState :: IO SessionState newSessionState = do -- Initialize SessionState @@ -581,6 +601,7 @@ newSessionState = do <*> STM.newIO -- fileToFlags <*> STM.newIO -- filesMap <*> newVar 0 -- version + <*> newVar Nothing -- sessionLoadingPreferenceConfig return sessionState -- | Given a root directory, return a Shake 'Action' which setups an @@ -602,7 +623,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] sessionState <- newSessionState - biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar (version sessionState)) -- This caches the mapping from Mod.hs -> hie.yaml @@ -833,31 +853,12 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- we are only loading this file and it failed let res = map (\err' -> renderCradleError err' cradle ncfp) err handleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err - - let - -- | We allow users to specify a loading strategy. - -- Check whether this config was changed since the last time we have loaded - -- a session. - -- - -- If the loading configuration changed, we likely should restart the session - -- in its entirety. - didSessionLoadingPreferenceConfigChange :: IO Bool - didSessionLoadingPreferenceConfigChange = do - mLoadingConfig <- readVar biosSessionLoadingVar - case mLoadingConfig of - Nothing -> do - writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) - pure False - Just loadingConfig -> do - writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) - pure (loadingConfig /= sessionLoading clientConfig) - -- This caches the mapping from hie.yaml + Mod.hs -> [String] -- Returns the Ghc session and the cradle dependencies let sessionOpts :: (Maybe FilePath, FilePath) -> IO () sessionOpts (hieYaml, file) = do - Extra.whenM didSessionLoadingPreferenceConfigChange $ do + Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState clientConfig) $ do logWith recorder Info LogSessionLoadingChanged -- If the dependencies are out of date then clear both caches and start -- again. 
From 702e36752cd63c91c97d4cffbe3332c11cae9881 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Wed, 14 May 2025 18:56:12 +0800 Subject: [PATCH 50/87] refactor SessionState to use Var instead of IORef for loaded and failed files --- .../session-loader/Development/IDE/Session.hs | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 87edfc0513..5d34423c6c 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -455,8 +455,8 @@ handleLoadingFailureSingle sessionState file = do removeCradleFile sessionState file data SessionState = SessionState - { loadedFiles :: !(IORef (HashSet FilePath)), - failedFiles :: !(IORef (HashSet FilePath)), + { loadedFiles :: !(Var (HashSet FilePath)), + failedFiles :: !(Var (HashSet FilePath)), pendingFiles :: !(S.OrderedSet FilePath), hscEnvs :: !(Var HieMap), fileToFlags :: !FlagsMap, @@ -471,7 +471,7 @@ data SessionState = SessionState -- | Add a file to the set of files with errors during loading addErrorLoadingFile :: SessionState -> FilePath -> IO () addErrorLoadingFile state file = - atomicModifyIORef' (failedFiles state) (\xs -> (Set.insert file xs, ())) + modifyVar_' (failedFiles state) (\xs -> return $ Set.insert file xs) addErrorLoadingFiles :: SessionState -> [FilePath] -> IO () addErrorLoadingFiles = mapM_ . addErrorLoadingFile @@ -479,26 +479,26 @@ addErrorLoadingFiles = mapM_ . addErrorLoadingFile -- | Remove a file from the set of files with errors during loading removeErrorLoadingFile :: SessionState -> FilePath -> IO () removeErrorLoadingFile state file = - atomicModifyIORef' (failedFiles state) (\xs -> (Set.delete file xs, ())) + modifyVar_' (failedFiles state) (\xs -> return $ Set.delete file xs) addCradleFiles :: SessionState -> HashSet FilePath -> IO () addCradleFiles state files = - atomicModifyIORef' (loadedFiles state) (\xs -> (files <> xs, ())) + modifyVar_' (loadedFiles state) (\xs -> return $ files <> xs) -- | Remove a file from the cradle files set removeCradleFile :: SessionState -> FilePath -> IO () removeCradleFile state file = - atomicModifyIORef' (loadedFiles state) (\xs -> (Set.delete file xs, ())) + modifyVar_' (loadedFiles state) (\xs -> return $ Set.delete file xs) -- | Clear error loading files and reset to empty set clearErrorLoadingFiles :: SessionState -> IO () clearErrorLoadingFiles state = - atomicModifyIORef' (failedFiles state) (\_ -> (Set.empty, ())) + modifyVar_' (failedFiles state) (const $ return Set.empty) -- | Clear cradle files and reset to empty set clearCradleFiles :: SessionState -> IO () clearCradleFiles state = - atomicModifyIORef' (loadedFiles state) (\_ -> (Set.empty, ())) + modifyVar_' (loadedFiles state) (const $ return Set.empty) -- | Reset the file maps in the session state resetFileMaps :: SessionState -> STM () @@ -562,8 +562,8 @@ handleFileProcessingError state hieYaml file diags extraDepFiles = do getExtraFilesToLoad :: SessionState -> FilePath -> IO [FilePath] getExtraFilesToLoad state cfp = do pendingFiles <- getPendingFiles state - errorFiles <- readIORef (failedFiles state) - old_files <- readIORef (loadedFiles state) + errorFiles <- readVar (failedFiles state) + old_files <- readVar (loadedFiles state) -- if the file is in error loading files, we fall back to single loading mode return $ Set.toList $ @@ -594,8 +594,8 @@ newSessionState :: IO SessionState newSessionState = do -- Initialize 
SessionState sessionState <- SessionState - <$> newIORef (Set.fromList []) -- loadedFiles - <*> newIORef (Set.fromList []) -- failedFiles + <$> newVar (Set.fromList []) -- loadedFiles + <*> newVar (Set.fromList []) -- failedFiles <*> S.newIO -- pendingFiles <*> newVar Map.empty -- hscEnvs <*> STM.newIO -- fileToFlags @@ -835,7 +835,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Left err -> do -- what if the error to load file is one of old_files ? let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err - old_files <- readIORef (loadedFiles sessionState) + old_files <- readVar (loadedFiles sessionState) let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) if length errorToLoadNewFiles > 1 then do From 09213d333342398b46aba48f12764c05cd490926 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 19 May 2025 22:24:59 +0800 Subject: [PATCH 51/87] simplified --- .../session-loader/Development/IDE/Session.hs | 54 +++++++++---------- 1 file changed, 25 insertions(+), 29 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 5d34423c6c..42290e87e5 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -437,16 +437,26 @@ On failure: This approach ensures efficient batch loading while isolating problematic files for individual handling. -} -handleLoadingSucc :: SessionState -> HashSet FilePath -> IO () -handleLoadingSucc sessionState files = do - atomically $ forM_ (Set.toList files) $ flip S.delete (pendingFiles sessionState) - mapM_ (removeErrorLoadingFile sessionState) (Set.toList files) - addCradleFiles sessionState files +-- SBL3 +handleLoadingSuccBatch :: Foldable t => Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> t TargetDetails -> IO () +handleLoadingSuccBatch recorder sessionState hieYaml this_flags_map all_targets = do + pendings <- getPendingFiles sessionState + -- this_flags_map might contains files not in pendingFiles, take the intersection + let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) + atomically $ do + STM.insert this_flags_map hieYaml (fileToFlags sessionState) + insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + atomically $ forM_ (Set.toList newLoaded) $ flip S.delete (pendingFiles sessionState) + mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded) + addCradleFiles sessionState newLoaded +-- SBL5 handleLoadingFailureBatch :: SessionState -> [FilePath] -> IO () handleLoadingFailureBatch sessionState files = do - addErrorLoadingFiles sessionState files + mapM_ (addErrorLoadingFile sessionState) files +-- SBL4 handleLoadingFailureSingle :: SessionState -> FilePath -> IO () handleLoadingFailureSingle sessionState file = do addErrorLoadingFile sessionState file @@ -473,9 +483,6 @@ addErrorLoadingFile :: SessionState -> FilePath -> IO () addErrorLoadingFile state file = modifyVar_' (failedFiles state) (\xs -> return $ Set.insert file xs) -addErrorLoadingFiles :: SessionState -> [FilePath] -> IO () -addErrorLoadingFiles = mapM_ . 
addErrorLoadingFile - -- | Remove a file from the set of files with errors during loading removeErrorLoadingFile :: SessionState -> FilePath -> IO () removeErrorLoadingFile state file = @@ -526,7 +533,6 @@ addToPending :: SessionState -> FilePath -> STM () addToPending state file = S.insert file (pendingFiles state) - -- | Insert multiple file mappings at once insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () insertAllFileMappings state mappings = @@ -541,13 +547,13 @@ getPendingFiles :: SessionState -> IO (HashSet FilePath) getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFiles state) -- | Handle errors during session loading by recording file as having error and removing from pending -handleSessionError :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () -handleSessionError state hieYaml file e = do - handleFileProcessingError state hieYaml file [renderPackageSetupException file e] mempty +handleSingleFileProcessingError' :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () +handleSingleFileProcessingError' state hieYaml file e = do + handleSingleFileProcessingError state hieYaml file [renderPackageSetupException file e] mempty -- | Common pattern: Insert file flags, insert file mapping, and remove from pending -handleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> IO () -handleFileProcessingError state hieYaml file diags extraDepFiles = do +handleSingleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> IO () +handleSingleFileProcessingError state hieYaml file diags extraDepFiles = do dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles let ncfp = toNormalizedFilePath' file let flags = ((diags, Nothing), dep) @@ -766,15 +772,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ]) Nothing - pendings <- getPendingFiles sessionState - -- this_flags_map might contains files not in pendingFiles, take the intersection - let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) - atomically $ do - STM.insert this_flags_map hieYaml (fileToFlags sessionState) - insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - handleLoadingSucc sessionState newLoaded + handleLoadingSuccBatch recorder sessionState hieYaml this_flags_map all_targets -- Typecheck all files in the project on startup checkProject <- getCheckProject @@ -828,9 +826,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> session (hieYaml, ncfp, opts, libDir) - | otherwise -> do - -- Use the common pattern here: updateFileState - handleFileProcessingError sessionState hieYaml cfp [renderPackageSetupException cfp GhcVersionMismatch{..}] mempty + | otherwise -> handleSingleFileProcessingError' sessionState hieYaml cfp (GhcVersionMismatch{..}) -- Failure case, either a cradle error or the none cradle Left err -> do -- what if the error to load file is one of old_files ? 
@@ -852,7 +848,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do else do -- we are only loading this file and it failed let res = map (\err' -> renderCradleError err' cradle ncfp) err - handleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err + handleSingleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err -- This caches the mapping from hie.yaml + Mod.hs -> [String] -- Returns the Ghc session and the cradle dependencies let sessionOpts :: (Maybe FilePath, FilePath) @@ -909,7 +905,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) hieYaml <- cradleLoc file let hieLoc = cachedHieYamlLocation <|> hieYaml - sessionOpts (hieLoc, file) `Safe.catch` handleSessionError sessionState hieLoc file + sessionOpts (hieLoc, file) `Safe.catch` handleSingleFileProcessingError' sessionState hieLoc file getOptionsLoop -- | Given a file, this function will return the HscEnv and the dependencies From f768db08e2b737604bc72b854602a0ec244ce85d Mon Sep 17 00:00:00 2001 From: fendor Date: Sat, 21 Jun 2025 13:06:55 +0200 Subject: [PATCH 52/87] Extract top-level functions for session initialisation The session initialisation has too many implicit dependencies. To break these apart, we extract local functions and turn them into top-level definition with all parameters explicitly given. This commit only makes sure session initialisation functions are promoted to top-level definitions and tries to simplify them. The top-level definitions are lacking type signatures to make it easier to change them, but we plan to add them back. --- .../session-loader/Development/IDE/Session.hs | 641 +++++++++--------- 1 file changed, 333 insertions(+), 308 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 6060f5ca05..045bdcbc54 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -27,7 +27,7 @@ import Control.Monad.Extra as Extra import Control.Monad.IO.Class import Control.Monad.Trans.Maybe (MaybeT (MaybeT, runMaybeT)) import qualified Crypto.Hash.SHA1 as H -import Data.Aeson hiding (Error) +import Data.Aeson hiding (Error, Key) import Data.Bifunctor import qualified Data.ByteString.Base16 as B16 import qualified Data.ByteString.Char8 as B @@ -59,7 +59,7 @@ import qualified Development.IDE.GHC.Compat.Core as GHC import Development.IDE.GHC.Compat.Env hiding (Logger) import Development.IDE.GHC.Compat.Units (UnitId) import Development.IDE.GHC.Util -import Development.IDE.Graph (Action) +import Development.IDE.Graph (Action, Key) import qualified Development.IDE.Session.Implicit as GhcIde import Development.IDE.Types.Diagnostics import Development.IDE.Types.Exports @@ -438,27 +438,27 @@ This approach ensures efficient batch loading while isolating problematic files -} -- SBL3 -handleLoadingSuccBatch :: Foldable t => Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> t TargetDetails -> IO () -handleLoadingSuccBatch recorder sessionState hieYaml this_flags_map all_targets = do - pendings <- getPendingFiles sessionState - -- this_flags_map might contains files not in pendingFiles, take the intersection - let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys 
this_flags_map) - atomically $ do - STM.insert this_flags_map hieYaml (fileToFlags sessionState) - insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - atomically $ forM_ (Set.toList newLoaded) $ flip S.delete (pendingFiles sessionState) - mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded) - addCradleFiles sessionState newLoaded +handleBatchLoadSuccess :: Foldable t => Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> t TargetDetails -> IO () +handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets = do + pendings <- getPendingFiles sessionState + -- this_flags_map might contains files not in pendingFiles, take the intersection + let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) + atomically $ do + STM.insert this_flags_map hieYaml (fileToFlags sessionState) + insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + atomically $ forM_ (Set.toList newLoaded) $ flip S.delete (pendingFiles sessionState) + mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded) + addCradleFiles sessionState newLoaded -- SBL5 -handleLoadingFailureBatch :: SessionState -> [FilePath] -> IO () -handleLoadingFailureBatch sessionState files = do +handleBatchLoadFailure :: SessionState -> [FilePath] -> IO () +handleBatchLoadFailure sessionState files = do mapM_ (addErrorLoadingFile sessionState) files -- SBL4 -handleLoadingFailureSingle :: SessionState -> FilePath -> IO () -handleLoadingFailureSingle sessionState file = do +handleSingleLoadFailure :: SessionState -> FilePath -> IO () +handleSingleLoadFailure sessionState file = do addErrorLoadingFile sessionState file removeErrorLoadingFile sessionState file atomically $ S.delete file (pendingFiles sessionState) @@ -557,7 +557,7 @@ handleSingleFileProcessingError state hieYaml file diags extraDepFiles = do dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles let ncfp = toNormalizedFilePath' file let flags = ((diags, Nothing), dep) - handleLoadingFailureSingle state file + handleSingleLoadFailure state file atomically $ do insertFileFlags state hieYaml ncfp flags insertFileMapping state hieYaml ncfp @@ -642,302 +642,29 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do return $ do clientConfig <- getClientConfigAction - extras@ShakeExtras{restartShakeSession, ideNc, knownTargetsVar, lspEnv + extras@ShakeExtras{ideNc, knownTargetsVar } <- getShakeExtras let invalidateShakeCache = do void $ incrementVersion sessionState return $ toNoFileKey GhcSessionIO - IdeOptions{ optTesting = IdeTesting optTesting - , optCheckProject = getCheckProject - , optExtensions - } <- getIdeOptions - - -- populate the knownTargetsVar with all the - -- files in the project so that `knownFiles` can learn about them and - -- we can generate a complete module graph - let extendKnownTargets newTargets = do - knownTargets <- concatForM newTargets $ \TargetDetails{..} -> - case targetTarget of - TargetFile f -> do - -- If a target file has multiple possible locations, then we - -- assume they are all separate file targets. - -- This happens with '.hs-boot' files if they are in the root directory of the project. 
- -- GHC reports options such as '-i. A' as 'TargetFile A.hs' instead of 'TargetModule A'. - -- In 'fromTargetId', we dutifully look for '.hs-boot' files and add them to the - -- targetLocations of the TargetDetails. Then we add everything to the 'knownTargetsVar'. - -- However, when we look for a 'Foo.hs-boot' file in 'FindImports.hs', we look for either - -- - -- * TargetFile Foo.hs-boot - -- * TargetModule Foo - -- - -- If we don't generate a TargetFile for each potential location, we will only have - -- 'TargetFile Foo.hs' in the 'knownTargetsVar', thus not find 'TargetFile Foo.hs-boot' - -- and also not find 'TargetModule Foo'. - fs <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations - pure $ map (\fp -> (TargetFile fp, Set.singleton fp)) (nubOrd (f:fs)) - TargetModule _ -> do - found <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations - return [(targetTarget, Set.fromList found)] - hasUpdate <- atomically $ do - known <- readTVar knownTargetsVar - let known' = flip mapHashed known $ \k -> unionKnownTargets k (mkKnownTargets knownTargets) - hasUpdate = if known /= known' then Just (unhashed known') else Nothing - writeTVar knownTargetsVar known' - pure hasUpdate - for_ hasUpdate $ \x -> - logWith recorder Debug $ LogKnownFilesUpdated (targetMap x) - return $ toNoFileKey GetKnownTargets - - -- Create a new HscEnv from a hieYaml root and a set of options - let packageSetup :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) - -> IO ([ComponentInfo], [ComponentInfo]) - packageSetup (hieYaml, cfp, opts, libDir) = do - -- Parse DynFlags for the newly discovered component - hscEnv <- emptyHscEnv ideNc libDir - newTargetDfs <- evalGhcEnv hscEnv $ setOptions cfp opts (hsc_dflags hscEnv) rootDir - let deps = componentDependencies opts ++ maybeToList hieYaml - dep_info <- getDependencyInfo deps - -- Now lookup to see whether we are combining with an existing HscEnv - -- or making a new one. The lookup returns the HscEnv and a list of - -- information about other components loaded into the HscEnv - -- (unitId, DynFlag, Targets) - modifyVar (hscEnvs sessionState) $ \m -> do - -- Just deps if there's already an HscEnv - -- Nothing is it's the first time we are making an HscEnv - let oldDeps = Map.lookup hieYaml m - let -- Add the raw information about this component to the list - -- We will modify the unitId and DynFlags used for - -- compilation but these are the true source of - -- information. - new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newTargetDfs - all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps - -- Get all the unit-ids for things in this component - _inplace = map rawComponentUnitId $ NE.toList all_deps - - all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do - let prefix = show rawComponentUnitId - -- See Note [Avoiding bad interface files] - let cacheDirOpts = componentOptions opts - cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts - processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags - -- The final component information, mostly the same but the DynFlags don't - -- contain any packages which are also loaded - -- into the same component. 
- pure $ ComponentInfo - { componentUnitId = rawComponentUnitId - , componentDynFlags = processed_df - , componentTargets = rawComponentTargets - , componentFP = rawComponentFP - , componentCOptions = rawComponentCOptions - , componentDependencyInfo = rawComponentDependencyInfo - } - -- Modify the map so the hieYaml now maps to the newly updated - -- ComponentInfos - -- Returns - -- . The information for the new component which caused this cache miss - -- . The modified information (without -inplace flags) for - -- existing packages - let (new,old) = NE.splitAt (NE.length new_deps) all_deps' - pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) - - - let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) -> IO () - session args@(hieYaml, _cfp, _opts, _libDir) = do - (new_deps, old_deps) <- packageSetup args - - -- For each component, now make a new HscEnvEq which contains the - -- HscEnv for the hie.yaml file but the DynFlags for that component - -- For GHC's supporting multi component sessions, we create a shared - -- HscEnv but set the active component accordingly - hscEnv <- emptyHscEnv ideNc _libDir - let new_cache = newComponentCache recorder optExtensions _cfp hscEnv - all_target_details <- new_cache old_deps new_deps - - let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') - all_targets' = concat all_target_details - this_dep_info <- getDependencyInfo $ maybeToList hieYaml - let (all_targets, this_flags_map, _this_options) - = case HM.lookup _cfp flags_map' of - Just this -> (all_targets', flags_map', this) - Nothing -> (this_target_details : all_targets', HM.insert _cfp this_flags flags_map', this_flags) - where - this_target_details = TargetDetails (TargetFile _cfp) this_error_env this_dep_info [_cfp] - this_flags = (this_error_env, this_dep_info) - this_error_env = ([this_error], Nothing) - this_error = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) _cfp - (T.unlines - [ "No cradle target found. Is this file listed in the targets of your cradle?" - , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" - ]) - Nothing - - handleLoadingSuccBatch recorder sessionState hieYaml this_flags_map all_targets - -- Typecheck all files in the project on startup - checkProject <- getCheckProject - - -- The VFS doesn't change on cradle edits, re-use the old one. - -- Invalidate all the existing GhcSession build nodes by restarting the Shake session - restartShakeSession VFSUnmodified "new component" [] $ do - keys2 <- invalidateShakeCache - keys1 <- extendKnownTargets all_targets - unless (null new_deps || not checkProject) $ do - cfps' <- liftIO $ filterM (IO.doesFileExist . 
fromNormalizedFilePath) (concatMap targetLocations all_targets) - void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do - mmt <- uses GetModificationTime cfps' - let cs_exist = catMaybes (zipWith (<$) cfps' mmt) - modIfaces <- uses GetModIface cs_exist - -- update exports map - shakeExtras <- getShakeExtras - let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces - liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) - return [keys1, keys2] - - let consultCradle :: Maybe FilePath -> FilePath -> IO () - consultCradle hieYaml cfp = do - let lfpLog = makeRelative rootDir cfp - logWith recorder Info $ LogCradlePath lfpLog - when (isNothing hieYaml) $ - logWith recorder Warning $ LogCradleNotFound lfpLog - cradle <- loadCradle recorder hieYaml rootDir - when optTesting $ mRunLspT lspEnv $ - sendNotification (SMethod_CustomMethod (Proxy @"ghcide/cradle/loaded")) (toJSON cfp) - - -- Display a user friendly progress message here: They probably don't know what a cradle is - let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) - <> " (for " <> T.pack lfpLog <> ")" - - extraToLoads <- getExtraFilesToLoad sessionState cfp - eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ - withTrace "Load cradle" $ \addTag -> do - addTag "file" lfpLog - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp extraToLoads - addTag "result" (show res) - return res - - logWith recorder Debug $ LogSessionLoadingResult eopts - let ncfp = toNormalizedFilePath' cfp - case eopts of - -- The cradle gave us some options so get to work turning them - -- into and HscEnv. - Right (opts, libDir, version) -> do - let compileTime = fullCompilerVersion - case reverse $ readP_to_S parseVersion version of - [] -> error $ "GHC version could not be parsed: " <> version - ((runTime, _):_) - | compileTime == runTime -> session (hieYaml, ncfp, opts, libDir) - | otherwise -> handleSingleFileProcessingError' sessionState hieYaml cfp (GhcVersionMismatch{..}) - -- Failure case, either a cradle error or the none cradle - Left err -> do - -- what if the error to load file is one of old_files ? - let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err - old_files <- readVar (loadedFiles sessionState) - let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) - if length errorToLoadNewFiles > 1 - then do - -- we are loading more files and failed, we need to retry - -- mark as less loaded files as failedLoadingFiles as possible - -- limitation is that when we are loading files, and the dependencies of old_files - -- are changed, and old_files are not valid anymore. - -- but they will still be in the old_files, and will not move to failedFiles. - -- And make other files failed to load in batch mode. 
- handleLoadingFailureBatch sessionState errorToLoadNewFiles - -- retry without other files - logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) - consultCradle hieYaml cfp - else do - -- we are only loading this file and it failed - let res = map (\err' -> renderCradleError err' cradle ncfp) err - handleSingleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err - -- This caches the mapping from hie.yaml + Mod.hs -> [String] - -- Returns the Ghc session and the cradle dependencies - let sessionOpts :: (Maybe FilePath, FilePath) - -> IO () - sessionOpts (hieYaml, file) = do - Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState clientConfig) $ do - logWith recorder Info LogSessionLoadingChanged - -- If the dependencies are out of date then clear both caches and start - -- again. - atomically $ resetFileMaps sessionState - -- Don't even keep the name cache, we start from scratch here! - modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) - -- cleanup error loading files and cradle files - clearErrorLoadingFiles sessionState - clearCradleFiles sessionState - cacheKey <- invalidateShakeCache - restartShakeSession VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) - - v <- atomically $ STM.lookup hieYaml (fileToFlags sessionState) - case v >>= HM.lookup (toNormalizedFilePath' file) of - Just (_opts, old_di) -> do - deps_ok <- checkDependencyInfo old_di - if not deps_ok - then do - -- if deps are old, we can try to load the error files again - removeErrorLoadingFile sessionState file - removeCradleFile sessionState file - -- If the dependencies are out of date then clear both caches and start - -- again. - atomically $ resetFileMaps sessionState - -- Keep the same name cache - modifyVar_ (hscEnvs sessionState) (return . Map.adjust (const []) hieYaml) - consultCradle hieYaml file - -- if deps are ok, we can just remove the file from pending files - else atomically $ removeFromPending sessionState file - Nothing -> consultCradle hieYaml file - - let checkInCache ::NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) - checkInCache ncfp = runMaybeT $ do - cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp (filesMap sessionState) - m <- MaybeT $ STM.lookup cachedHieYamlLocation (fileToFlags sessionState) - MaybeT $ pure $ HM.lookup ncfp m - - -- The main function which gets options for a file. We only want one of these running - -- at a time. Therefore the IORef contains the currently running cradle, if we try - -- to get some more options then we wait for the currently running action to finish - -- before attempting to do so. 
- let getOptionsLoop :: IO () - getOptionsLoop = do - -- Get the next file to load - file <- atomically $ S.readQueue (pendingFiles sessionState) - logWith recorder Debug (LogGetOptionsLoop file) - let ncfp = toNormalizedFilePath' file - cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) - hieYaml <- cradleLoc file - let hieLoc = cachedHieYamlLocation <|> hieYaml - sessionOpts (hieLoc, file) `Safe.catch` handleSingleFileProcessingError' sessionState hieLoc file - getOptionsLoop - - -- | Given a file, this function will return the HscEnv and the dependencies - -- it would look up the cache first, if the cache is not available, it would - -- submit a request to the getOptionsLoop to get the options for the file - -- and wait until the options are available - let lookupOrWaitCache :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) - lookupOrWaitCache absFile = do - let ncfp = toNormalizedFilePath' absFile - res <- atomically $ do - -- wait until target file is not in pendingFiles - Extra.whenM (S.lookup absFile (pendingFiles sessionState)) STM.retry - -- check if in the cache - checkInCache ncfp - logWith recorder Debug $ LogLookupSessionCache absFile - updateDateRes <- case res of - Just r -> do - depOk <- checkDependencyInfo (snd r) - if depOk - then return $ Just r - else return Nothing - _ -> return Nothing - case updateDateRes of - Just r -> return r - Nothing -> do - -- if not ok, we need to reload the session - atomically $ addToPending sessionState absFile - lookupOrWaitCache absFile + ideOptions <- getIdeOptions -- see Note [Serializing runs in separate thread] -- Start the getOptionsLoop if the queue is empty - liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop + liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ do + let newSessionLoadingOptions = SessionLoadingOptions + { findCradle = cradleLoc + , .. + } + sessionShake = SessionShake + { restartSession = restartShakeSession extras + , invalidateCache = invalidateShakeCache + , enqueueActions = shakeEnqueue extras + , lspContext = lspEnv extras + } + + writeTQueue que (getOptionsLoop recorder sessionShake sessionState newSessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc) -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. 
@@ -947,7 +674,305 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let absolutePathsCradleDeps (eq, deps) = (eq, fmap toAbsolutePath $ Map.keys deps) returnWithVersion $ \file -> do let absFile = toAbsolutePath file - absolutePathsCradleDeps <$> lookupOrWaitCache absFile + absolutePathsCradleDeps <$> lookupOrWaitCache recorder sessionState absFile + +-- | Given a file, this function will return the HscEnv and the dependencies +-- it would look up the cache first, if the cache is not available, it would +-- submit a request to the getOptionsLoop to get the options for the file +-- and wait until the options are available +lookupOrWaitCache :: Recorder (WithPriority Log) -> SessionState -> FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) +lookupOrWaitCache recorder sessionState absFile = do + let ncfp = toNormalizedFilePath' absFile + res <- atomically $ do + -- wait until target file is not in pendingFiles + Extra.whenM (S.lookup absFile (pendingFiles sessionState)) STM.retry + -- check if in the cache + checkInCache sessionState ncfp + logWith recorder Debug $ LogLookupSessionCache absFile + updateDateRes <- case res of + Just r -> do + depOk <- checkDependencyInfo (snd r) + if depOk + then return $ Just r + else return Nothing + _ -> return Nothing + case updateDateRes of + Just r -> return r + Nothing -> do + -- if not ok, we need to reload the session + atomically $ addToPending sessionState absFile + lookupOrWaitCache recorder sessionState absFile + +checkInCache :: SessionState -> NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) +checkInCache sessionState ncfp = runMaybeT $ do + cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp (filesMap sessionState) + m <- MaybeT $ STM.lookup cachedHieYamlLocation (fileToFlags sessionState) + MaybeT $ pure $ HM.lookup ncfp m + +data SessionShake = SessionShake + { restartSession :: VFSModified -> String -> [DelayedAction ()] -> IO [Key] -> IO () + , invalidateCache :: IO Key + , enqueueActions :: DelayedAction () -> IO (IO ()) + , lspContext :: Maybe (LanguageContextEnv Config) + } + +-- The main function which gets options for a file. We only want one of these running +-- at a time. Therefore the IORef contains the currently running cradle, if we try +-- to get some more options then we wait for the currently running action to finish +-- before attempting to do so. 
+getOptionsLoop recorder sessionShake sessionState sessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc = do + -- Get the next file to load + file <- atomically $ S.readQueue (pendingFiles sessionState) + logWith recorder Debug (LogGetOptionsLoop file) + let ncfp = toNormalizedFilePath' file + cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) + hieYaml <- findCradle sessionLoadingOptions file + let hieLoc = cachedHieYamlLocation <|> hieYaml + sessionOpts recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc (hieLoc, file) + `Safe.catch` handleSingleFileProcessingError' sessionState hieLoc file + getOptionsLoop recorder sessionShake sessionState sessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc + +-- | This caches the mapping from hie.yaml + Mod.hs -> [String] +-- Returns the Ghc session and the cradle dependencies +sessionOpts recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc (hieYaml, file) = do + Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState clientConfig) $ do + logWith recorder Info LogSessionLoadingChanged + -- If the dependencies are out of date then clear both caches and start + -- again. + atomically $ resetFileMaps sessionState + -- Don't even keep the name cache, we start from scratch here! + modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) + -- cleanup error loading files and cradle files + clearErrorLoadingFiles sessionState + clearCradleFiles sessionState + cacheKey <- invalidateCache sessionShake + restartSession sessionShake VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) + + v <- atomically $ STM.lookup hieYaml (fileToFlags sessionState) + case v >>= HM.lookup (toNormalizedFilePath' file) of + Just (_opts, old_di) -> do + deps_ok <- checkDependencyInfo old_di + if not deps_ok + then do + -- if deps are old, we can try to load the error files again + removeErrorLoadingFile sessionState file + removeCradleFile sessionState file + -- If the dependencies are out of date then clear both caches and start + -- again. + atomically $ resetFileMaps sessionState + -- Keep the same name cache + modifyVar_ (hscEnvs sessionState) (return . Map.adjust (const []) hieYaml) + consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml file + -- if deps are ok, we can just remove the file from pending files + else atomically $ removeFromPending sessionState file + Nothing -> consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml file + +consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml cfp = do + (cradle, eopts) <- loadCradleWithNotifications recorder (optTesting ideOptions) + (lspContext sessionShake) sessionState (sessionLoading clientConfig) + (loadCradle sessionLoadingOptions) + rootDir hieYaml cfp + logWith recorder Debug $ LogSessionLoadingResult eopts + let ncfp = toNormalizedFilePath' cfp + case eopts of + -- The cradle gave us some options so get to work turning them + -- into and HscEnv. 
+ Right (opts, libDir, version) -> do + let compileTime = fullCompilerVersion + case reverse $ readP_to_S parseVersion version of + [] -> error $ "GHC version could not be parsed: " <> version + ((runTime, _):_) + | compileTime == runTime -> session recorder sessionShake sessionState ideOptions sessionLoadingOptions knownTargetsVar rootDir ideNc (hieYaml, ncfp, opts, libDir) + | otherwise -> handleSingleFileProcessingError' sessionState hieYaml cfp (GhcVersionMismatch{..}) + -- Failure case, either a cradle error or the none cradle + Left err -> do + -- what if the error to load file is one of old_files ? + let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err + old_files <- readVar (loadedFiles sessionState) + let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) + if length errorToLoadNewFiles > 1 + then do + -- we are loading more files and failed, we need to retry + -- mark as less loaded files as failedLoadingFiles as possible + -- limitation is that when we are loading files, and the dependencies of old_files + -- are changed, and old_files are not valid anymore. + -- but they will still be in the old_files, and will not move to failedFiles. + -- And make other files failed to load in batch mode. + handleBatchLoadFailure sessionState errorToLoadNewFiles + -- retry without other files + logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) + consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml cfp + else do + -- we are only loading this file and it failed + let res = map (\err' -> renderCradleError err' cradle ncfp) err + handleSingleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err + +session recorder sessionShake sessionState ideOptions sessionLoadingOptions knownTargetsVar rootDir ideNc (hieYaml, cfp, opts, libDir) = do + let initEmptyHscEnv = emptyHscEnv ideNc libDir + (new_deps, old_deps) <- packageSetup recorder sessionState rootDir (getCacheDirs sessionLoadingOptions) initEmptyHscEnv (hieYaml, cfp, opts) + + -- For each component, now make a new HscEnvEq which contains the + -- HscEnv for the hie.yaml file but the DynFlags for that component + -- For GHC's supporting multi component sessions, we create a shared + -- HscEnv but set the active component accordingly + hscEnv <- initEmptyHscEnv + let new_cache = newComponentCache recorder (optExtensions ideOptions) cfp hscEnv + all_target_details <- new_cache old_deps new_deps + (all_targets, this_flags_map) <- addErrorTargetIfUnknown all_target_details hieYaml cfp + + handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets + -- Typecheck all files in the project on startup + loadKnownTargets recorder sessionShake (optCheckProject ideOptions) knownTargetsVar new_deps all_targets + +-- | Create a new HscEnv from a hieYaml root and a set of options +packageSetup recorder sessionState rootDir getCacheDirs newEmptyHscEnv (hieYaml, cfp, opts) = do + -- Parse DynFlags for the newly discovered component + hscEnv <- newEmptyHscEnv + newTargetDfs <- evalGhcEnv hscEnv $ setOptions cfp opts (hsc_dflags hscEnv) rootDir + let deps = componentDependencies opts ++ maybeToList hieYaml + dep_info <- getDependencyInfo deps + -- Now lookup to see whether we are combining with an existing HscEnv + -- or making a new one. 
The lookup returns the HscEnv and a list of + -- information about other components loaded into the HscEnv + -- (unitId, DynFlag, Targets) + modifyVar (hscEnvs sessionState) $ + addComponentInfo recorder getCacheDirs dep_info newTargetDfs (hieYaml, cfp, opts) + +addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) m = do + -- Just deps if there's already an HscEnv + -- Nothing is it's the first time we are making an HscEnv + let oldDeps = Map.lookup hieYaml m + let -- Add the raw information about this component to the list + -- We will modify the unitId and DynFlags used for + -- compilation but these are the true source of + -- information. + new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newDynFlags + all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps + -- Get all the unit-ids for things in this component + + all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do + let prefix = show rawComponentUnitId + -- See Note [Avoiding bad interface files] + let cacheDirOpts = componentOptions opts + cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts + processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags + -- The final component information, mostly the same but the DynFlags don't + -- contain any packages which are also loaded + -- into the same component. + pure $ ComponentInfo + { componentUnitId = rawComponentUnitId + , componentDynFlags = processed_df + , componentTargets = rawComponentTargets + , componentFP = rawComponentFP + , componentCOptions = rawComponentCOptions + , componentDependencyInfo = rawComponentDependencyInfo + } + -- Modify the map so the hieYaml now maps to the newly updated + -- ComponentInfos + -- Returns + -- . The information for the new component which caused this cache miss + -- . The modified information (without -inplace flags) for + -- existing packages + let (new,old) = NE.splitAt (NE.length new_deps) all_deps' + pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) + +addErrorTargetIfUnknown all_target_details hieYaml cfp = do + let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') + all_targets' = concat all_target_details + this_dep_info <- getDependencyInfo $ maybeToList hieYaml + let (all_targets, this_flags_map) = case HM.lookup cfp flags_map' of + Just _ -> (all_targets', flags_map') + Nothing -> (this_target_details : all_targets', HM.insert cfp this_flags flags_map') + where + this_target_details = TargetDetails (TargetFile cfp) this_error_env this_dep_info [cfp] + this_flags = (this_error_env, this_dep_info) + this_error_env = ([this_error], Nothing) + this_error = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) cfp + (T.unlines + [ "No cradle target found. Is this file listed in the targets of your cradle?" + , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" + ]) + Nothing + pure (all_targets, this_flags_map) + +-- | Populate the knownTargetsVar with all the +-- files in the project so that `knownFiles` can learn about them and +-- we can generate a complete module graph +extendKnownTargets recorder knownTargetsVar newTargets = do + knownTargets <- concatForM newTargets $ \TargetDetails{..} -> + case targetTarget of + TargetFile f -> do + -- If a target file has multiple possible locations, then we + -- assume they are all separate file targets. 
+ -- This happens with '.hs-boot' files if they are in the root directory of the project. + -- GHC reports options such as '-i. A' as 'TargetFile A.hs' instead of 'TargetModule A'. + -- In 'fromTargetId', we dutifully look for '.hs-boot' files and add them to the + -- targetLocations of the TargetDetails. Then we add everything to the 'knownTargetsVar'. + -- However, when we look for a 'Foo.hs-boot' file in 'FindImports.hs', we look for either + -- + -- * TargetFile Foo.hs-boot + -- * TargetModule Foo + -- + -- If we don't generate a TargetFile for each potential location, we will only have + -- 'TargetFile Foo.hs' in the 'knownTargetsVar', thus not find 'TargetFile Foo.hs-boot' + -- and also not find 'TargetModule Foo'. + fs <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations + pure $ map (\fp -> (TargetFile fp, Set.singleton fp)) (nubOrd (f:fs)) + TargetModule _ -> do + found <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations + return [(targetTarget, Set.fromList found)] + hasUpdate <- atomically $ do + known <- readTVar knownTargetsVar + let known' = flip mapHashed known $ \k -> unionKnownTargets k (mkKnownTargets knownTargets) + hasUpdate = if known /= known' then Just (unhashed known') else Nothing + writeTVar knownTargetsVar known' + pure hasUpdate + for_ hasUpdate $ \x -> + logWith recorder Debug $ LogKnownFilesUpdated (targetMap x) + return $ toNoFileKey GetKnownTargets + +loadKnownTargets recorder sessionShake getCheckProject knownTargetsVar new_deps targets = do + checkProject <- getCheckProject + + -- The VFS doesn't change on cradle edits, re-use the old one. + -- Invalidate all the existing GhcSession build nodes by restarting the Shake session + restartSession sessionShake VFSUnmodified "new component" [] $ do + keys2 <- invalidateCache sessionShake + keys1 <- extendKnownTargets recorder knownTargetsVar targets + unless (null new_deps || not checkProject) $ do + cfps' <- liftIO $ filterM (IO.doesFileExist . 
fromNormalizedFilePath) (concatMap targetLocations targets) + void $ enqueueActions sessionShake $ mkDelayedAction "InitialLoad" Debug $ void $ do + mmt <- uses GetModificationTime cfps' + let cs_exist = catMaybes (zipWith (<$) cfps' mmt) + modIfaces <- uses GetModIface cs_exist + -- update exports map + shakeExtras <- getShakeExtras + let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces + liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) + return [keys1, keys2] + +loadCradleWithNotifications recorder (IdeTesting isTesting) lspEnv sessionState sessionPref loadCradle rootDir hieYaml cfp= do + let lfpLog = makeRelative rootDir cfp + logWith recorder Info $ LogCradlePath lfpLog + when (isNothing hieYaml) $ + logWith recorder Warning $ LogCradleNotFound lfpLog + cradle <- loadCradle recorder hieYaml rootDir + when (isTesting) $ mRunLspT lspEnv $ + sendNotification (SMethod_CustomMethod (Proxy @"ghcide/cradle/loaded")) (toJSON cfp) + + -- Display a user friendly progress message here: They probably don't know what a cradle is + let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) + <> " (for " <> T.pack lfpLog <> ")" + + extraToLoads <- getExtraFilesToLoad sessionState cfp + eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ + withTrace "Load cradle" $ \addTag -> do + addTag "file" lfpLog + res <- cradleToOptsAndLibDir recorder sessionPref cradle cfp extraToLoads + addTag "result" (show res) + return res + pure (cradle, eopts) + -- | Run the specific cradle on a specific FilePath via hie-bios. -- This then builds dependencies or whatever based on the cradle, gets the From d4fbc2c339e16b60df12788b2d272bd0884640ff Mon Sep 17 00:00:00 2001 From: fendor Date: Fri, 4 Jul 2025 18:42:25 +0200 Subject: [PATCH 53/87] Remove unused _removeInplacePackages function --- .../session-loader/Development/IDE/Session.hs | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 045bdcbc54..f6ebe43481 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -1324,24 +1324,6 @@ getDependencyInfo fs = Map.fromList <$> mapM do_one fs do_one :: FilePath -> IO (FilePath, Maybe UTCTime) do_one fp = (fp,) . eitherToMaybe <$> safeTryIO (getModificationTime fp) --- | This function removes all the -package flags which refer to packages we --- are going to deal with ourselves. For example, if a executable depends --- on a library component, then this function will remove the library flag --- from the package flags for the executable --- --- There are several places in GHC (for example the call to hptInstances in --- tcRnImports) which assume that all modules in the HPT have the same unit --- ID. Therefore we create a fake one and give them all the same unit id. -_removeInplacePackages --Only used in ghc < 9.4 - :: UnitId -- ^ fake uid to use for our internal component - -> [UnitId] - -> DynFlags - -> (DynFlags, [UnitId]) -_removeInplacePackages fake_uid us df = (setHomeUnitId_ fake_uid $ - df { packageFlags = ps }, uids) - where - (uids, ps) = Compat.filterInplaceUnits us (packageFlags df) - -- | Memoize an IO function, with the characteristics: -- -- * If multiple people ask for a result simultaneously, make sure you only compute it once. 
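The compute-once behaviour described in the comment above can be illustrated with a minimal, hypothetical sketch (this is not the memoIO defined in Session.hs): a Map-based cache guarded by an MVar, so concurrent callers asking for the same key share a single computation. For simplicity this sketch holds the lock while computing, which also serialises work on different keys, a simplification the real implementation does not make.

    import Control.Concurrent.MVar (newMVar, modifyMVar)
    import qualified Data.Map.Strict as Map

    -- Minimal sketch of compute-once memoisation for an IO function.
    -- Assumes an in-memory Map cache; the MVar is held during the
    -- computation, so all callers are serialised (a simplification).
    memoIO :: Ord k => (k -> IO v) -> IO (k -> IO v)
    memoIO compute = do
        cache <- newMVar Map.empty
        pure $ \k -> modifyMVar cache $ \m ->
            case Map.lookup k m of
                Just v  -> pure (m, v)
                Nothing -> do
                    v <- compute k
                    pure (Map.insert k v m, v)
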
From ff807c335960be11df8c8d08271a2c6e73c628c1 Mon Sep 17 00:00:00 2001 From: fendor Date: Fri, 4 Jul 2025 19:37:36 +0200 Subject: [PATCH 54/87] Introduce SessionM for bundling read-only variables --- .../session-loader/Development/IDE/Session.hs | 207 +++++++++++------- 1 file changed, 131 insertions(+), 76 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index f6ebe43481..abeb9cb5d1 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -28,7 +28,6 @@ import Control.Monad.IO.Class import Control.Monad.Trans.Maybe (MaybeT (MaybeT, runMaybeT)) import qualified Crypto.Hash.SHA1 as H import Data.Aeson hiding (Error, Key) -import Data.Bifunctor import qualified Data.ByteString.Base16 as B16 import qualified Data.ByteString.Char8 as B import Data.Default @@ -36,7 +35,6 @@ import Data.Either.Extra import Data.Function import Data.Hashable hiding (hash) import qualified Data.HashMap.Strict as HM -import Data.IORef import Data.List import Data.List.Extra as L import Data.List.NonEmpty (NonEmpty (..)) @@ -119,7 +117,7 @@ import qualified System.Random as Random import System.Random (RandomGen) import Text.ParserCombinators.ReadP (readP_to_S) -import Control.Concurrent.STM (STM) +import Control.Concurrent.STM (STM, TVar) import qualified Control.Monad.STM as STM import qualified Development.IDE.Session.OrderedSet as S import qualified Focus @@ -129,6 +127,7 @@ import GHC.Types.Error (errMsgDiagnostic, singleMessage) import GHC.Unit.State import qualified StmContainers.Map as STM +import Control.Monad.Trans.Reader #if MIN_VERSION_ghc(9,13,0) import GHC.Driver.Make (checkHomeUnitsClosed) @@ -479,33 +478,33 @@ data SessionState = SessionState -- These functions encapsulate common operations on the SessionState -- | Add a file to the set of files with errors during loading -addErrorLoadingFile :: SessionState -> FilePath -> IO () +addErrorLoadingFile :: MonadIO m => SessionState -> FilePath -> m () addErrorLoadingFile state file = - modifyVar_' (failedFiles state) (\xs -> return $ Set.insert file xs) + liftIO $ modifyVar_' (failedFiles state) (\xs -> return $ Set.insert file xs) -- | Remove a file from the set of files with errors during loading -removeErrorLoadingFile :: SessionState -> FilePath -> IO () +removeErrorLoadingFile :: MonadIO m => SessionState -> FilePath -> m () removeErrorLoadingFile state file = - modifyVar_' (failedFiles state) (\xs -> return $ Set.delete file xs) + liftIO $ modifyVar_' (failedFiles state) (\xs -> return $ Set.delete file xs) -addCradleFiles :: SessionState -> HashSet FilePath -> IO () +addCradleFiles :: MonadIO m => SessionState -> HashSet FilePath -> m () addCradleFiles state files = - modifyVar_' (loadedFiles state) (\xs -> return $ files <> xs) + liftIO $ modifyVar_' (loadedFiles state) (\xs -> return $ files <> xs) -- | Remove a file from the cradle files set -removeCradleFile :: SessionState -> FilePath -> IO () +removeCradleFile :: MonadIO m => SessionState -> FilePath -> m () removeCradleFile state file = - modifyVar_' (loadedFiles state) (\xs -> return $ Set.delete file xs) + liftIO $ modifyVar_' (loadedFiles state) (\xs -> return $ Set.delete file xs) -- | Clear error loading files and reset to empty set -clearErrorLoadingFiles :: SessionState -> IO () +clearErrorLoadingFiles :: MonadIO m => SessionState -> m () clearErrorLoadingFiles state = - modifyVar_' (failedFiles state) (const $ return Set.empty) + liftIO $ 
modifyVar_' (failedFiles state) (const $ return Set.empty) -- | Clear cradle files and reset to empty set -clearCradleFiles :: SessionState -> IO () +clearCradleFiles :: MonadIO m => SessionState -> m () clearCradleFiles state = - modifyVar_' (loadedFiles state) (const $ return Set.empty) + liftIO $ modifyVar_' (loadedFiles state) (const $ return Set.empty) -- | Reset the file maps in the session state resetFileMaps :: SessionState -> STM () @@ -547,13 +546,13 @@ getPendingFiles :: SessionState -> IO (HashSet FilePath) getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFiles state) -- | Handle errors during session loading by recording file as having error and removing from pending -handleSingleFileProcessingError' :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () +handleSingleFileProcessingError' :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> SessionM () handleSingleFileProcessingError' state hieYaml file e = do handleSingleFileProcessingError state hieYaml file [renderPackageSetupException file e] mempty -- | Common pattern: Insert file flags, insert file mapping, and remove from pending -handleSingleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> IO () -handleSingleFileProcessingError state hieYaml file diags extraDepFiles = do +handleSingleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> SessionM () +handleSingleFileProcessingError state hieYaml file diags extraDepFiles = liftIO $ do dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles let ncfp = toNormalizedFilePath' file let flags = ((diags, Nothing), dep) @@ -584,16 +583,17 @@ getExtraFilesToLoad state cfp = do -- -- If the loading configuration changed, we likely should restart the session -- in its entirety. 
-didSessionLoadingPreferenceConfigChange :: SessionState -> Config -> IO Bool -didSessionLoadingPreferenceConfigChange s clientConfig = do +didSessionLoadingPreferenceConfigChange :: SessionState -> SessionM Bool +didSessionLoadingPreferenceConfigChange s = do + clientConfig <- asks sessionClientConfig let biosSessionLoadingVar = sessionLoadingPreferenceConfig s - mLoadingConfig <- readVar biosSessionLoadingVar + mLoadingConfig <- liftIO $ readVar biosSessionLoadingVar case mLoadingConfig of Nothing -> do - writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + liftIO $ writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) pure False Just loadingConfig -> do - writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + liftIO $ writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) pure (loadingConfig /= sessionLoading clientConfig) newSessionState :: IO SessionState @@ -661,10 +661,17 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do { restartSession = restartShakeSession extras , invalidateCache = invalidateShakeCache , enqueueActions = shakeEnqueue extras - , lspContext = lspEnv extras + } + sessionEnv = SessionEnv + { sessionLspContext = lspEnv extras + , sessionRootDir = rootDir + , sessionIdeOptions = ideOptions + , sessionClientConfig = clientConfig + , sessionSharedNameCache = ideNc + , sessionLoadingOptions = newSessionLoadingOptions } - writeTQueue que (getOptionsLoop recorder sessionShake sessionState newSessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc) + writeTQueue que (runReaderT (getOptionsLoop recorder sessionShake sessionState knownTargetsVar) sessionEnv) -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. @@ -713,45 +720,55 @@ data SessionShake = SessionShake { restartSession :: VFSModified -> String -> [DelayedAction ()] -> IO [Key] -> IO () , invalidateCache :: IO Key , enqueueActions :: DelayedAction () -> IO (IO ()) - , lspContext :: Maybe (LanguageContextEnv Config) } --- The main function which gets options for a file. We only want one of these running +data SessionEnv = SessionEnv + { sessionLspContext :: Maybe (LanguageContextEnv Config) + , sessionRootDir :: FilePath + , sessionIdeOptions :: IdeOptions + , sessionClientConfig :: Config + , sessionSharedNameCache :: NameCache + , sessionLoadingOptions :: SessionLoadingOptions + } + +type SessionM = ReaderT SessionEnv IO + +-- | The main function which gets options for a file. We only want one of these running -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. 
-getOptionsLoop recorder sessionShake sessionState sessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc = do +getOptionsLoop :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> SessionM () +getOptionsLoop recorder sessionShake sessionState knownTargetsVar = forever $ do + sessionLoadingOptions <- asks sessionLoadingOptions -- Get the next file to load - file <- atomically $ S.readQueue (pendingFiles sessionState) + file <- liftIO $ atomically $ S.readQueue (pendingFiles sessionState) logWith recorder Debug (LogGetOptionsLoop file) let ncfp = toNormalizedFilePath' file - cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) - hieYaml <- findCradle sessionLoadingOptions file + cachedHieYamlLocation <- join <$> liftIO (atomically (STM.lookup ncfp (filesMap sessionState))) + hieYaml <- liftIO $ findCradle sessionLoadingOptions file let hieLoc = cachedHieYamlLocation <|> hieYaml - sessionOpts recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc (hieLoc, file) + sessionOpts recorder sessionShake sessionState knownTargetsVar (hieLoc, file) `Safe.catch` handleSingleFileProcessingError' sessionState hieLoc file - getOptionsLoop recorder sessionShake sessionState sessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc -- | This caches the mapping from hie.yaml + Mod.hs -> [String] -- Returns the Ghc session and the cradle dependencies -sessionOpts recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc (hieYaml, file) = do - Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState clientConfig) $ do +sessionOpts :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> (Maybe FilePath, FilePath) -> SessionM () +sessionOpts recorder sessionShake sessionState knownTargetsVar (hieYaml, file) = do + Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState) $ do logWith recorder Info LogSessionLoadingChanged - -- If the dependencies are out of date then clear both caches and start - -- again. - atomically $ resetFileMaps sessionState + liftIO $ atomically $ resetFileMaps sessionState -- Don't even keep the name cache, we start from scratch here! - modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) + liftIO $ modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) -- cleanup error loading files and cradle files clearErrorLoadingFiles sessionState clearCradleFiles sessionState - cacheKey <- invalidateCache sessionShake - restartSession sessionShake VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) + cacheKey <- liftIO $ invalidateCache sessionShake + liftIO $ restartSession sessionShake VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) - v <- atomically $ STM.lookup hieYaml (fileToFlags sessionState) + v <- liftIO $ atomically $ STM.lookup hieYaml (fileToFlags sessionState) case v >>= HM.lookup (toNormalizedFilePath' file) of Just (_opts, old_di) -> do - deps_ok <- checkDependencyInfo old_di + deps_ok <- liftIO $ checkDependencyInfo old_di if not deps_ok then do -- if deps are old, we can try to load the error files again @@ -759,19 +776,22 @@ sessionOpts recorder sessionShake ideOptions sessionState sessionLoadingOptions removeCradleFile sessionState file -- If the dependencies are out of date then clear both caches and start -- again. 
- atomically $ resetFileMaps sessionState + liftIO $ atomically $ resetFileMaps sessionState -- Keep the same name cache - modifyVar_ (hscEnvs sessionState) (return . Map.adjust (const []) hieYaml) - consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml file + liftIO $ modifyVar_ (hscEnvs sessionState) (return . Map.adjust (const []) hieYaml) + consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml file -- if deps are ok, we can just remove the file from pending files - else atomically $ removeFromPending sessionState file - Nothing -> consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml file - -consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml cfp = do - (cradle, eopts) <- loadCradleWithNotifications recorder (optTesting ideOptions) - (lspContext sessionShake) sessionState (sessionLoading clientConfig) - (loadCradle sessionLoadingOptions) - rootDir hieYaml cfp + else liftIO $ atomically $ removeFromPending sessionState file + Nothing -> + consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml file + +consultCradle :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> Maybe FilePath -> FilePath -> SessionM () +consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml cfp = do + loadingOptions <- asks sessionLoadingOptions + (cradle, eopts) <- loadCradleWithNotifications recorder + sessionState + (loadCradle loadingOptions recorder) + hieYaml cfp logWith recorder Debug $ LogSessionLoadingResult eopts let ncfp = toNormalizedFilePath' cfp case eopts of @@ -782,13 +802,13 @@ consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOption case reverse $ readP_to_S parseVersion version of [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) - | compileTime == runTime -> session recorder sessionShake sessionState ideOptions sessionLoadingOptions knownTargetsVar rootDir ideNc (hieYaml, ncfp, opts, libDir) + | compileTime == runTime -> session recorder sessionShake sessionState knownTargetsVar (hieYaml, ncfp, opts, libDir) | otherwise -> handleSingleFileProcessingError' sessionState hieYaml cfp (GhcVersionMismatch{..}) -- Failure case, either a cradle error or the none cradle Left err -> do -- what if the error to load file is one of old_files ? let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err - old_files <- readVar (loadedFiles sessionState) + old_files <- liftIO $ readVar (loadedFiles sessionState) let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) if length errorToLoadNewFiles > 1 then do @@ -798,46 +818,66 @@ consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOption -- are changed, and old_files are not valid anymore. -- but they will still be in the old_files, and will not move to failedFiles. -- And make other files failed to load in batch mode. 
- handleBatchLoadFailure sessionState errorToLoadNewFiles + liftIO $ handleBatchLoadFailure sessionState errorToLoadNewFiles -- retry without other files logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) - consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml cfp + consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml cfp else do -- we are only loading this file and it failed let res = map (\err' -> renderCradleError err' cradle ncfp) err handleSingleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err -session recorder sessionShake sessionState ideOptions sessionLoadingOptions knownTargetsVar rootDir ideNc (hieYaml, cfp, opts, libDir) = do - let initEmptyHscEnv = emptyHscEnv ideNc libDir - (new_deps, old_deps) <- packageSetup recorder sessionState rootDir (getCacheDirs sessionLoadingOptions) initEmptyHscEnv (hieYaml, cfp, opts) +session :: + Recorder (WithPriority Log) -> + SessionShake -> + SessionState -> + TVar (Hashed KnownTargets) -> + (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) -> + SessionM () +session recorder sessionShake sessionState knownTargetsVar(hieYaml, cfp, opts, libDir) = do + let initEmptyHscEnv = emptyHscEnv libDir + (new_deps, old_deps) <- packageSetup recorder sessionState initEmptyHscEnv (hieYaml, cfp, opts) -- For each component, now make a new HscEnvEq which contains the -- HscEnv for the hie.yaml file but the DynFlags for that component -- For GHC's supporting multi component sessions, we create a shared -- HscEnv but set the active component accordingly hscEnv <- initEmptyHscEnv + ideOptions <- asks sessionIdeOptions let new_cache = newComponentCache recorder (optExtensions ideOptions) cfp hscEnv - all_target_details <- new_cache old_deps new_deps - (all_targets, this_flags_map) <- addErrorTargetIfUnknown all_target_details hieYaml cfp + all_target_details <- liftIO $ new_cache old_deps new_deps + (all_targets, this_flags_map) <- liftIO $ addErrorTargetIfUnknown all_target_details hieYaml cfp - handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets + liftIO $ handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets -- Typecheck all files in the project on startup - loadKnownTargets recorder sessionShake (optCheckProject ideOptions) knownTargetsVar new_deps all_targets + liftIO $ loadKnownTargets recorder sessionShake (optCheckProject ideOptions) knownTargetsVar new_deps all_targets -- | Create a new HscEnv from a hieYaml root and a set of options -packageSetup recorder sessionState rootDir getCacheDirs newEmptyHscEnv (hieYaml, cfp, opts) = do +packageSetup :: Recorder (WithPriority Log) -> SessionState -> SessionM HscEnv -> (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> SessionM ([ComponentInfo], [ComponentInfo]) +packageSetup recorder sessionState newEmptyHscEnv (hieYaml, cfp, opts) = do + getCacheDirs <- asks (getCacheDirs . 
sessionLoadingOptions) + rootDir <- asks sessionRootDir -- Parse DynFlags for the newly discovered component hscEnv <- newEmptyHscEnv - newTargetDfs <- evalGhcEnv hscEnv $ setOptions cfp opts (hsc_dflags hscEnv) rootDir + newTargetDfs <- liftIO $ evalGhcEnv hscEnv $ setOptions cfp opts (hsc_dflags hscEnv) rootDir let deps = componentDependencies opts ++ maybeToList hieYaml - dep_info <- getDependencyInfo deps + dep_info <- liftIO $ getDependencyInfo deps -- Now lookup to see whether we are combining with an existing HscEnv -- or making a new one. The lookup returns the HscEnv and a list of -- information about other components loaded into the HscEnv -- (unitId, DynFlag, Targets) - modifyVar (hscEnvs sessionState) $ + liftIO $ modifyVar (hscEnvs sessionState) $ addComponentInfo recorder getCacheDirs dep_info newTargetDfs (hieYaml, cfp, opts) +addComponentInfo :: + MonadUnliftIO m => + Recorder (WithPriority Log) -> + (String -> [String] -> IO CacheDirs) -> + DependencyInfo -> + NonEmpty (DynFlags, [GHC.Target]) -> + (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> + Map.Map (Maybe FilePath) [RawComponentInfo] -> + m (Map.Map (Maybe FilePath) [RawComponentInfo], ([ComponentInfo], [ComponentInfo])) addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) m = do -- Just deps if there's already an HscEnv -- Nothing is it's the first time we are making an HscEnv @@ -876,6 +916,7 @@ addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) let (new,old) = NE.splitAt (NE.length new_deps) all_deps' pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) +addErrorTargetIfUnknown :: Foldable t => t [TargetDetails] -> Maybe FilePath -> NormalizedFilePath -> IO ([TargetDetails], HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) addErrorTargetIfUnknown all_target_details hieYaml cfp = do let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') all_targets' = concat all_target_details @@ -898,6 +939,7 @@ addErrorTargetIfUnknown all_target_details hieYaml cfp = do -- | Populate the knownTargetsVar with all the -- files in the project so that `knownFiles` can learn about them and -- we can generate a complete module graph +extendKnownTargets :: Recorder (WithPriority Log) -> TVar (Hashed KnownTargets) -> [TargetDetails] -> IO Key extendKnownTargets recorder knownTargetsVar newTargets = do knownTargets <- concatForM newTargets $ \TargetDetails{..} -> case targetTarget of @@ -931,6 +973,7 @@ extendKnownTargets recorder knownTargetsVar newTargets = do logWith recorder Debug $ LogKnownFilesUpdated (targetMap x) return $ toNoFileKey GetKnownTargets +loadKnownTargets :: Recorder (WithPriority Log) -> SessionShake -> IO Bool -> TVar (Hashed KnownTargets) -> [ComponentInfo] -> [TargetDetails] -> IO () loadKnownTargets recorder sessionShake getCheckProject knownTargetsVar new_deps targets = do checkProject <- getCheckProject @@ -951,12 +994,23 @@ loadKnownTargets recorder sessionShake getCheckProject knownTargetsVar new_deps liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] -loadCradleWithNotifications recorder (IdeTesting isTesting) lspEnv sessionState sessionPref loadCradle rootDir hieYaml cfp= do +loadCradleWithNotifications :: + Recorder (WithPriority Log) -> + SessionState -> + (Maybe FilePath -> FilePath -> IO (Cradle Void)) -> + Maybe FilePath -> + FilePath -> + SessionM (Cradle Void, Either [CradleError] (ComponentOptions, FilePath, String)) +loadCradleWithNotifications recorder 
sessionState loadCradle hieYaml cfp = do + IdeTesting isTesting <- asks (optTesting . sessionIdeOptions) + sessionPref <- asks (sessionLoading . sessionClientConfig) + lspEnv <- asks sessionLspContext + rootDir <- asks sessionRootDir let lfpLog = makeRelative rootDir cfp logWith recorder Info $ LogCradlePath lfpLog when (isNothing hieYaml) $ logWith recorder Warning $ LogCradleNotFound lfpLog - cradle <- loadCradle recorder hieYaml rootDir + cradle <- liftIO $ loadCradle hieYaml rootDir when (isTesting) $ mRunLspT lspEnv $ sendNotification (SMethod_CustomMethod (Proxy @"ghcide/cradle/loaded")) (toJSON cfp) @@ -964,11 +1018,11 @@ loadCradleWithNotifications recorder (IdeTesting isTesting) lspEnv sessionState let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" - extraToLoads <- getExtraFilesToLoad sessionState cfp + extraToLoads <- liftIO $ getExtraFilesToLoad sessionState cfp eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog - res <- cradleToOptsAndLibDir recorder sessionPref cradle cfp extraToLoads + res <- liftIO $ cradleToOptsAndLibDir recorder sessionPref cradle cfp extraToLoads addTag "result" (show res) return res pure (cradle, eopts) @@ -1008,11 +1062,12 @@ cradleToOptsAndLibDir recorder loadConfig cradle file old_fps = do PreferSingleComponentLoading -> LoadFile PreferMultiComponentLoading -> LoadWithContext old_fps -emptyHscEnv :: NameCache -> FilePath -> IO HscEnv -emptyHscEnv nc libDir = do +emptyHscEnv :: FilePath -> SessionM HscEnv +emptyHscEnv libDir = do + nc <- asks sessionSharedNameCache -- We call setSessionDynFlags so that the loader is initialised -- We need to do this before we call initUnits. 
- env <- runGhc (Just libDir) $ + env <- liftIO $ runGhc (Just libDir) $ getSessionDynFlags >>= setSessionDynFlags >> getSession pure $ setNameCache nc (hscSetFlags ((hsc_dflags env){useUnicode = True }) env) From 1aa1e2c55b621e8edad45baf4dfc0ebe1124005e Mon Sep 17 00:00:00 2001 From: fendor Date: Fri, 4 Jul 2025 20:10:33 +0200 Subject: [PATCH 55/87] Extract ghc specific functions into separate module --- ghcide/ghcide.cabal | 2 + .../session-loader/Development/IDE/Session.hs | 541 +----------------- .../Development/IDE/Session/Dependency.hs | 35 ++ .../Development/IDE/Session/Ghc.hs | 522 +++++++++++++++++ 4 files changed, 587 insertions(+), 513 deletions(-) create mode 100644 ghcide/session-loader/Development/IDE/Session/Dependency.hs create mode 100644 ghcide/session-loader/Development/IDE/Session/Ghc.hs diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 8218883077..dead03f36c 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -180,7 +180,9 @@ library Development.IDE.Plugin.Test Development.IDE.Plugin.TypeLenses Development.IDE.Session + Development.IDE.Session.Dependency Development.IDE.Session.Diagnostics + Development.IDE.Session.Ghc Development.IDE.Session.Implicit Development.IDE.Spans.AtPoint Development.IDE.Spans.Common diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index abeb9cb5d1..996f757303 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -1,4 +1,3 @@ -{-# LANGUAGE CPP #-} {-# LANGUAGE TypeFamilies #-} {-| @@ -31,40 +30,31 @@ import Data.Aeson hiding (Error, Key) import qualified Data.ByteString.Base16 as B16 import qualified Data.ByteString.Char8 as B import Data.Default -import Data.Either.Extra -import Data.Function import Data.Hashable hiding (hash) import qualified Data.HashMap.Strict as HM import Data.List import Data.List.Extra as L -import Data.List.NonEmpty (NonEmpty (..)) -import qualified Data.List.NonEmpty as NE import qualified Data.Map.Strict as Map import Data.Maybe import Data.Proxy import qualified Data.Text as T -import Data.Time.Clock import Data.Version import Development.IDE.Core.RuleTypes import Development.IDE.Core.Shake hiding (Log, knownTargets, withHieDb) import qualified Development.IDE.GHC.Compat as Compat -import Development.IDE.GHC.Compat.CmdLine import Development.IDE.GHC.Compat.Core hiding (Target, TargetFile, TargetModule, Var, Warning, getOptions) -import qualified Development.IDE.GHC.Compat.Core as GHC import Development.IDE.GHC.Compat.Env hiding (Logger) -import Development.IDE.GHC.Compat.Units (UnitId) import Development.IDE.GHC.Util import Development.IDE.Graph (Action, Key) import qualified Development.IDE.Session.Implicit as GhcIde import Development.IDE.Types.Diagnostics import Development.IDE.Types.Exports -import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq) +import Development.IDE.Types.HscEnvEq (HscEnvEq) import Development.IDE.Types.Location import Development.IDE.Types.Options -import GHC.ResponseFile import qualified HIE.Bios as HieBios import HIE.Bios.Environment hiding (getCacheDir) import HIE.Bios.Types hiding (Log) @@ -92,24 +82,20 @@ import Data.Void import Control.Concurrent.STM.Stats (atomically, modifyTVar', readTVar, writeTVar) import Control.Concurrent.STM.TQueue -import Control.DeepSeq -import Control.Exception (evaluate) -import Control.Monad.IO.Unlift (MonadUnliftIO) import Control.Monad.Trans.Cont (ContT (ContT, runContT)) import Data.Foldable (for_) 
import Data.HashMap.Strict (HashMap) import Data.HashSet (HashSet) import qualified Data.HashSet as Set -import qualified Data.Set as OS import Database.SQLite.Simple import Development.IDE.Core.Tracing (withTrace) import Development.IDE.Core.WorkerThread (withWorkerQueue) -import qualified Development.IDE.GHC.Compat.Util as Compat import Development.IDE.Session.Diagnostics (renderCradleError) +import Development.IDE.Session.Dependency +import Development.IDE.Session.Ghc hiding (Log) import Development.IDE.Types.Shake (WithHieDb, WithHieDbShield (..), toNoFileKey) -import GHC.Data.Graph.Directed import HieDb.Create import HieDb.Types import Ide.PluginUtils (toAbsolute) @@ -121,17 +107,9 @@ import Control.Concurrent.STM (STM, TVar) import qualified Control.Monad.STM as STM import qualified Development.IDE.Session.OrderedSet as S import qualified Focus -import GHC.Driver.Env (hsc_all_home_unit_ids) -import GHC.Driver.Errors.Types -import GHC.Types.Error (errMsgDiagnostic, - singleMessage) -import GHC.Unit.State import qualified StmContainers.Map as STM import Control.Monad.Trans.Reader - -#if MIN_VERSION_ghc(9,13,0) -import GHC.Driver.Make (checkHomeUnitsClosed) -#endif +import qualified Development.IDE.Session.Ghc as Ghc data Log = LogSettingInitialDynFlags @@ -141,16 +119,12 @@ data Log | LogHieDbRetriesExhausted !Int !Int !Int !SomeException | LogHieDbWriterThreadSQLiteError !SQLError | LogHieDbWriterThreadException !SomeException - | LogInterfaceFilesCacheDir !FilePath | LogKnownFilesUpdated !(HashMap Target (HashSet NormalizedFilePath)) - | LogMakingNewHscEnv ![UnitId] - | LogDLLLoadError !String | LogCradlePath !FilePath | LogCradleNotFound !FilePath | LogSessionLoadingResult !(Either [CradleError] (ComponentOptions, FilePath, String)) | LogCradle !(Cradle Void) | LogNoneCradleFound FilePath - | LogNewComponentCache !(([FileDiagnostic], Maybe HscEnvEq), DependencyInfo) | LogHieBios HieBios.Log | LogSessionLoadingChanged | LogSessionNewLoadedFiles ![FilePath] @@ -158,6 +132,7 @@ data Log | LogGetOptionsLoop !FilePath | LogLookupSessionCache !FilePath | LogTime !String + | LogSessionGhc Ghc.Log deriving instance Show Log instance Pretty Log where @@ -209,18 +184,12 @@ instance Pretty Log where vcat [ "HieDb writer thread exception:" , pretty (displayException e) ] - LogInterfaceFilesCacheDir path -> - "Interface files cache directory:" <+> pretty path LogKnownFilesUpdated targetToPathsMap -> nest 2 $ vcat [ "Known files updated:" , viaShow $ (HM.map . Set.map) fromNormalizedFilePath targetToPathsMap ] - LogMakingNewHscEnv inPlaceUnitIds -> - "Making new HscEnv. In-place unit ids:" <+> pretty (map show inPlaceUnitIds) - LogDLLLoadError errorString -> - "Error dynamically loading libm.so.6:" <+> pretty errorString LogCradlePath path -> "Cradle path:" <+> pretty path LogCradleNotFound path -> @@ -232,9 +201,8 @@ instance Pretty Log where "Session loading result:" <+> viaShow e LogCradle cradle -> "Cradle:" <+> viaShow cradle - LogNewComponentCache componentCache -> - "New component cache HscEnvEq:" <+> viaShow componentCache LogHieBios msg -> pretty msg + LogSessionGhc msg -> pretty msg LogSessionLoadingChanged -> "Session Loading config changed, reloading the full session." 
@@ -242,9 +210,6 @@ instance Pretty Log where
 hiedbDataVersion :: String
 hiedbDataVersion = "1"
 
-data CacheDirs = CacheDirs
-  { hiCacheDir, hieCacheDir, oCacheDir :: Maybe FilePath}
-
 data SessionLoadingOptions = SessionLoadingOptions
   { findCradle             :: FilePath -> IO (Maybe FilePath)
   -- | Load the cradle with an optional 'hie.yaml' location.
@@ -733,18 +698,22 @@ data SessionEnv = SessionEnv
 
 type SessionM = ReaderT SessionEnv IO
 
--- | The main function which gets options for a file. We only want one of these running
--- at a time. Therefore the IORef contains the currently running cradle, if we try
--- to get some more options then we wait for the currently running action to finish
--- before attempting to do so.
+-- | The main function which gets options for a file.
+--
+-- The general approach is as follows:
+-- 1. Find the 'hie.yaml' for the next file target, if there is any.
+-- 2. Check in the cache whether the given 'hie.yaml' was already loaded before.
+-- 3.1. If it wasn't, initialise a new session and continue with step 4.
+-- 3.2. If it is loaded, check whether we need to reload the session, e.g. because the `.cabal` file was modified.
+-- 3.2.1. If we need to reload, reset the cached file maps and components for this 'hie.yaml' and continue with step 4.
+-- 3.2.2. If not, the file is already covered by the loaded session and only needs to be dropped from the pending queue.
+-- 4. Consult the cradle for the file and set up a session for its component.
 getOptionsLoop :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> SessionM ()
 getOptionsLoop recorder sessionShake sessionState knownTargetsVar = forever $ do
-    sessionLoadingOptions <- asks sessionLoadingOptions
     -- Get the next file to load
     file <- liftIO $ atomically $ S.readQueue (pendingFiles sessionState)
     logWith recorder Debug (LogGetOptionsLoop file)
     let ncfp = toNormalizedFilePath' file
     cachedHieYamlLocation <- join <$> liftIO (atomically (STM.lookup ncfp (filesMap sessionState)))
+    sessionLoadingOptions <- asks sessionLoadingOptions
     hieYaml <- liftIO $ findCradle sessionLoadingOptions file
     let hieLoc = cachedHieYamlLocation <|> hieYaml
     sessionOpts recorder sessionShake sessionState knownTargetsVar (hieLoc, file)
@@ -780,8 +749,9 @@ sessionOpts recorder sessionShake sessionState knownTargetsVar (hieYaml, file) =
         -- Keep the same name cache
         liftIO $ modifyVar_ (hscEnvs sessionState) (return .
Map.adjust (const []) hieYaml) consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml file - -- if deps are ok, we can just remove the file from pending files - else liftIO $ atomically $ removeFromPending sessionState file + else do + -- if deps are ok, we can just remove the file from pending files + liftIO $ atomically $ removeFromPending sessionState file Nothing -> consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml file @@ -835,7 +805,7 @@ session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) -> SessionM () session recorder sessionShake sessionState knownTargetsVar(hieYaml, cfp, opts, libDir) = do - let initEmptyHscEnv = emptyHscEnv libDir + let initEmptyHscEnv = emptyHscEnvM libDir (new_deps, old_deps) <- packageSetup recorder sessionState initEmptyHscEnv (hieYaml, cfp, opts) -- For each component, now make a new HscEnvEq which contains the @@ -844,7 +814,7 @@ session recorder sessionShake sessionState knownTargetsVar(hieYaml, cfp, opts, l -- HscEnv but set the active component accordingly hscEnv <- initEmptyHscEnv ideOptions <- asks sessionIdeOptions - let new_cache = newComponentCache recorder (optExtensions ideOptions) cfp hscEnv + let new_cache = newComponentCache (cmapWithPrio LogSessionGhc recorder) (optExtensions ideOptions) cfp hscEnv all_target_details <- liftIO $ new_cache old_deps new_deps (all_targets, this_flags_map) <- liftIO $ addErrorTargetIfUnknown all_target_details hieYaml cfp @@ -867,54 +837,7 @@ packageSetup recorder sessionState newEmptyHscEnv (hieYaml, cfp, opts) = do -- information about other components loaded into the HscEnv -- (unitId, DynFlag, Targets) liftIO $ modifyVar (hscEnvs sessionState) $ - addComponentInfo recorder getCacheDirs dep_info newTargetDfs (hieYaml, cfp, opts) - -addComponentInfo :: - MonadUnliftIO m => - Recorder (WithPriority Log) -> - (String -> [String] -> IO CacheDirs) -> - DependencyInfo -> - NonEmpty (DynFlags, [GHC.Target]) -> - (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> - Map.Map (Maybe FilePath) [RawComponentInfo] -> - m (Map.Map (Maybe FilePath) [RawComponentInfo], ([ComponentInfo], [ComponentInfo])) -addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) m = do - -- Just deps if there's already an HscEnv - -- Nothing is it's the first time we are making an HscEnv - let oldDeps = Map.lookup hieYaml m - let -- Add the raw information about this component to the list - -- We will modify the unitId and DynFlags used for - -- compilation but these are the true source of - -- information. - new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newDynFlags - all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps - -- Get all the unit-ids for things in this component - - all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do - let prefix = show rawComponentUnitId - -- See Note [Avoiding bad interface files] - let cacheDirOpts = componentOptions opts - cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts - processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags - -- The final component information, mostly the same but the DynFlags don't - -- contain any packages which are also loaded - -- into the same component. 
- pure $ ComponentInfo - { componentUnitId = rawComponentUnitId - , componentDynFlags = processed_df - , componentTargets = rawComponentTargets - , componentFP = rawComponentFP - , componentCOptions = rawComponentCOptions - , componentDependencyInfo = rawComponentDependencyInfo - } - -- Modify the map so the hieYaml now maps to the newly updated - -- ComponentInfos - -- Returns - -- . The information for the new component which caused this cache miss - -- . The modified information (without -inplace flags) for - -- existing packages - let (new,old) = NE.splitAt (NE.length new_deps) all_deps' - pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) + addComponentInfo (cmapWithPrio LogSessionGhc recorder) getCacheDirs dep_info newTargetDfs (hieYaml, cfp, opts) addErrorTargetIfUnknown :: Foldable t => t [TargetDetails] -> Maybe FilePath -> NormalizedFilePath -> IO ([TargetDetails], HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) addErrorTargetIfUnknown all_target_details hieYaml cfp = do @@ -1062,255 +985,20 @@ cradleToOptsAndLibDir recorder loadConfig cradle file old_fps = do PreferSingleComponentLoading -> LoadFile PreferMultiComponentLoading -> LoadWithContext old_fps -emptyHscEnv :: FilePath -> SessionM HscEnv -emptyHscEnv libDir = do - nc <- asks sessionSharedNameCache - -- We call setSessionDynFlags so that the loader is initialised - -- We need to do this before we call initUnits. - env <- liftIO $ runGhc (Just libDir) $ - getSessionDynFlags >>= setSessionDynFlags >> getSession - pure $ setNameCache nc (hscSetFlags ((hsc_dflags env){useUnicode = True }) env) - -data TargetDetails = TargetDetails - { - targetTarget :: !Target, - targetEnv :: !(IdeResult HscEnvEq), - targetDepends :: !DependencyInfo, - targetLocations :: ![NormalizedFilePath] - } +-- ---------------------------------------------------------------------------- +-- Utilities +-- ---------------------------------------------------------------------------- -fromTargetId :: [FilePath] -- ^ import paths - -> [String] -- ^ extensions to consider - -> TargetId - -> IdeResult HscEnvEq - -> DependencyInfo - -> IO [TargetDetails] --- For a target module we consider all the import paths -fromTargetId is exts (GHC.TargetModule modName) env dep = do - let fps = [i moduleNameSlashes modName -<.> ext <> boot - | ext <- exts - , i <- is - , boot <- ["", "-boot"] - ] - let locs = fmap toNormalizedFilePath' fps - return [TargetDetails (TargetModule modName) env dep locs] --- For a 'TargetFile' we consider all the possible module names -fromTargetId _ _ (GHC.TargetFile f _) env deps = do - let nf = toNormalizedFilePath' f - let other - | "-boot" `isSuffixOf` f = toNormalizedFilePath' (L.dropEnd 5 $ fromNormalizedFilePath nf) - | otherwise = toNormalizedFilePath' (fromNormalizedFilePath nf ++ "-boot") - return [TargetDetails (TargetFile nf) env deps [nf, other]] +emptyHscEnvM :: FilePath -> SessionM HscEnv +emptyHscEnvM libDir = do + nc <- asks sessionSharedNameCache + liftIO $ Ghc.emptyHscEnv nc libDir toFlagsMap :: TargetDetails -> [(NormalizedFilePath, (IdeResult HscEnvEq, DependencyInfo))] toFlagsMap TargetDetails{..} = [ (l, (targetEnv, targetDepends)) | l <- targetLocations] - -setNameCache :: NameCache -> HscEnv -> HscEnv -setNameCache nc hsc = hsc { hsc_NC = nc } - -#if MIN_VERSION_ghc(9,13,0) --- Moved back to implementation in GHC. 
-checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> [DriverMessages] -checkHomeUnitsClosed' ue _ = checkHomeUnitsClosed ue -#else --- This function checks the important property that if both p and q are home units --- then any dependency of p, which transitively depends on q is also a home unit. --- GHC had an implementation of this function, but it was horribly inefficient --- We should move back to the GHC implementation on compilers where --- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/12162 is included -checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> Maybe (Compat.MsgEnvelope DriverMessage) -checkHomeUnitsClosed' ue home_id_set - | OS.null bad_unit_ids = Nothing - | otherwise = Just (GHC.mkPlainErrorMsgEnvelope rootLoc $ DriverHomePackagesNotClosed (OS.toList bad_unit_ids)) - where - bad_unit_ids = upwards_closure OS.\\ home_id_set - rootLoc = mkGeneralSrcSpan (Compat.fsLit "") - - graph :: Graph (Node UnitId UnitId) - graph = graphFromEdgedVerticesUniq graphNodes - - -- downwards closure of graph - downwards_closure - = graphFromEdgedVerticesUniq [ DigraphNode uid uid (OS.toList deps) - | (uid, deps) <- Map.toList (allReachable graph node_key)] - - inverse_closure = transposeG downwards_closure - - upwards_closure = OS.fromList $ map node_key $ reachablesG inverse_closure [DigraphNode uid uid [] | uid <- OS.toList home_id_set] - - all_unit_direct_deps :: UniqMap UnitId (OS.Set UnitId) - all_unit_direct_deps - = unitEnv_foldWithKey go emptyUniqMap $ ue_home_unit_graph ue - where - go rest this this_uis = - plusUniqMap_C OS.union - (addToUniqMap_C OS.union external_depends this (OS.fromList this_deps)) - rest - where - external_depends = mapUniqMap (OS.fromList . unitDepends) -#if !MIN_VERSION_ghc(9,7,0) - $ listToUniqMap $ Map.toList -#endif - - $ unitInfoMap this_units - this_units = homeUnitEnv_units this_uis - this_deps = [ Compat.toUnitId unit | (unit,Just _) <- explicitUnits this_units] - - graphNodes :: [Node UnitId UnitId] - graphNodes = go OS.empty home_id_set - where - go done todo - = case OS.minView todo of - Nothing -> [] - Just (uid, todo') - | OS.member uid done -> go done todo' - | otherwise -> case lookupUniqMap all_unit_direct_deps uid of - Nothing -> pprPanic "uid not found" (Compat.ppr (uid, all_unit_direct_deps)) - Just depends -> - let todo'' = (depends OS.\\ done) `OS.union` todo' - in DigraphNode uid uid (OS.toList depends) : go (OS.insert uid done) todo'' -#endif - --- | Create a mapping from FilePaths to HscEnvEqs --- This combines all the components we know about into --- an appropriate session, which is a multi component --- session on GHC 9.4+ -newComponentCache - :: Recorder (WithPriority Log) - -> [String] -- ^ File extensions to consider - -> NormalizedFilePath -- ^ Path to file that caused the creation of this component - -> HscEnv -- ^ An empty HscEnv - -> [ComponentInfo] -- ^ New components to be loaded - -> [ComponentInfo] -- ^ old, already existing components - -> IO [ [TargetDetails] ] -newComponentCache recorder exts _cfp hsc_env old_cis new_cis = do - let cis = Map.unionWith unionCIs (mkMap new_cis) (mkMap old_cis) - -- When we have multiple components with the same uid, - -- prefer the new one over the old. - -- However, we might have added some targets to the old unit - -- (see special target), so preserve those - unionCIs new_ci old_ci = new_ci { componentTargets = componentTargets new_ci ++ componentTargets old_ci } - mkMap = Map.fromListWith unionCIs . 
map (\ci -> (componentUnitId ci, ci)) - let dfs = map componentDynFlags $ Map.elems cis - uids = Map.keys cis - logWith recorder Info $ LogMakingNewHscEnv uids - hscEnv' <- -- Set up a multi component session with the other units on GHC 9.4 - Compat.initUnits dfs hsc_env - - let closure_errs = maybeToList $ checkHomeUnitsClosed' (hsc_unit_env hscEnv') (hsc_all_home_unit_ids hscEnv') - closure_err_to_multi_err err = - ideErrorWithSource - (Just "cradle") (Just DiagnosticSeverity_Warning) _cfp - (T.pack (Compat.printWithoutUniques (singleMessage err))) - (Just (fmap GhcDriverMessage err)) - multi_errs = map closure_err_to_multi_err closure_errs - bad_units = OS.fromList $ concat $ do - x <- map errMsgDiagnostic closure_errs - DriverHomePackagesNotClosed us <- pure x - pure us - isBad ci = (homeUnitId_ (componentDynFlags ci)) `OS.member` bad_units - -- Whenever we spin up a session on Linux, dynamically load libm.so.6 - -- in. We need this in case the binary is statically linked, in which - -- case the interactive session will fail when trying to load - -- ghc-prim, which happens whenever Template Haskell is being - -- evaluated or haskell-language-server's eval plugin tries to run - -- some code. If the binary is dynamically linked, then this will have - -- no effect. - -- See https://github.com/haskell/haskell-language-server/issues/221 - -- We need to do this after the call to setSessionDynFlags initialises - -- the loader - when (os == "linux") $ do - initObjLinker hscEnv' - res <- loadDLL hscEnv' "libm.so.6" - case res of - Nothing -> pure () - Just err -> logWith recorder Error $ LogDLLLoadError err - - forM (Map.elems cis) $ \ci -> do - let df = componentDynFlags ci - thisEnv <- do - -- In GHC 9.4 we have multi component support, and we have initialised all the units - -- above. - -- We just need to set the current unit here - pure $ hscSetActiveUnitId (homeUnitId_ df) hscEnv' - henv <- newHscEnvEq thisEnv - let targetEnv = (if isBad ci then multi_errs else [], Just henv) - targetDepends = componentDependencyInfo ci - logWith recorder Debug $ LogNewComponentCache (targetEnv, targetDepends) - evaluate $ liftRnf rwhnf $ componentTargets ci - - let mk t = fromTargetId (importPaths df) exts (targetId t) targetEnv targetDepends - ctargets <- concatMapM mk (componentTargets ci) - - return (L.nubOrdOn targetTarget ctargets) - -{- Note [Avoiding bad interface files] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Originally, we set the cache directory for the various components once -on the first occurrence of the component. -This works fine if these components have no references to each other, -but you have components that depend on each other, the interface files are -updated for each component. -After restarting the session and only opening the component that depended -on the other, suddenly the interface files of this component are stale. -However, from the point of view of `ghcide`, they do not look stale, -thus, not regenerated and the IDE shows weird errors such as: -``` -typecheckIface -Declaration for Rep_ClientRunFlags -Axiom branches Rep_ClientRunFlags: - Failed to load interface for ‘Distribution.Simple.Flag’ - Use -v to see a list of the files searched for. 
-``` -and -``` -expectJust checkFamInstConsistency -CallStack (from HasCallStack): - error, called at compiler\\utils\\Maybes.hs:55:27 in ghc:Maybes - expectJust, called at compiler\\typecheck\\FamInst.hs:461:30 in ghc:FamInst -``` - -To mitigate this, we set the cache directory for each component dependent -on the components of the current `HscEnv`, additionally to the component options -of the respective components. -Assume two components, c1, c2, where c2 depends on c1, and the options of the -respective components are co1, co2. -If we want to load component c2, followed by c1, we set the cache directory for -each component in this way: - - * Load component c2 - * (Cache Directory State) - - name of c2 + co2 - * Load component c1 - * (Cache Directory State) - - name of c2 + name of c1 + co2 - - name of c2 + name of c1 + co1 - -Overall, we created three cache directories. If we opened c1 first, then we -create a fourth cache directory. -This makes sure that interface files are always correctly updated. - -Since this causes a lot of recompilation, we only update the cache-directory, -if the dependencies of a component have really changed. -E.g. when you load two executables, they can not depend on each other. They -should be filtered out, such that we dont have to re-compile everything. --} - --- | Set the cache-directory based on the ComponentOptions and a list of --- internal packages. --- For the exact reason, see Note [Avoiding bad interface files]. -setCacheDirs :: MonadUnliftIO m => Recorder (WithPriority Log) -> CacheDirs -> DynFlags -> m DynFlags -setCacheDirs recorder CacheDirs{..} dflags = do - logWith recorder Info $ LogInterfaceFilesCacheDir (fromMaybe cacheDir hiCacheDir) - pure $ dflags - & maybe id setHiDir hiCacheDir - & maybe id setHieDir hieCacheDir - & maybe id setODir oCacheDir - -- See Note [Multi Cradle Dependency Info] -type DependencyInfo = Map.Map FilePath (Maybe UTCTime) type HieMap = Map.Map (Maybe FilePath) [RawComponentInfo] -- | Maps a "hie.yaml" location to all its Target Filepaths and options. type FlagsMap = STM.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) @@ -1318,67 +1006,6 @@ type FlagsMap = STM.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResu -- It aims to be the reverse of 'FlagsMap'. type FilesMap = STM.Map NormalizedFilePath (Maybe FilePath) - --- This is pristine information about a component -data RawComponentInfo = RawComponentInfo - { rawComponentUnitId :: UnitId - -- | Unprocessed DynFlags. Contains inplace packages such as libraries. - -- We do not want to use them unprocessed. - , rawComponentDynFlags :: DynFlags - -- | All targets of this components. - , rawComponentTargets :: [GHC.Target] - -- | Filepath which caused the creation of this component - , rawComponentFP :: NormalizedFilePath - -- | Component Options used to load the component. - , rawComponentCOptions :: ComponentOptions - -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file - -- to last modification time. See Note [Multi Cradle Dependency Info]. - , rawComponentDependencyInfo :: DependencyInfo - } - --- This is processed information about the component, in particular the dynflags will be modified. -data ComponentInfo = ComponentInfo - { componentUnitId :: UnitId - -- | Processed DynFlags. Does not contain inplace packages such as local - -- libraries. Can be used to actually load this Component. - , componentDynFlags :: DynFlags - -- | All targets of this components. 
- , componentTargets :: [GHC.Target] - -- | Filepath which caused the creation of this component - , componentFP :: NormalizedFilePath - -- | Component Options used to load the component. - , componentCOptions :: ComponentOptions - -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file - -- to last modification time. See Note [Multi Cradle Dependency Info] - , componentDependencyInfo :: DependencyInfo - } - --- | Check if any dependency has been modified lately. -checkDependencyInfo :: DependencyInfo -> IO Bool -checkDependencyInfo old_di = do - di <- getDependencyInfo (Map.keys old_di) - return (di == old_di) - --- Note [Multi Cradle Dependency Info] --- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ --- Why do we implement our own file modification tracking here? --- The primary reason is that the custom caching logic is quite complicated and going into shake --- adds even more complexity and more indirection. I did try for about 5 hours to work out how to --- use shake rules rather than IO but eventually gave up. - --- | Computes a mapping from a filepath to its latest modification date. --- See Note [Multi Cradle Dependency Info] why we do this ourselves instead --- of letting shake take care of it. -getDependencyInfo :: [FilePath] -> IO DependencyInfo -getDependencyInfo fs = Map.fromList <$> mapM do_one fs - - where - safeTryIO :: IO a -> IO (Either IOException a) - safeTryIO = Safe.try - - do_one :: FilePath -> IO (FilePath, Maybe UTCTime) - do_one fp = (fp,) . eitherToMaybe <$> safeTryIO (getModificationTime fp) - -- | Memoize an IO function, with the characteristics: -- -- * If multiple people ask for a result simultaneously, make sure you only compute it once. @@ -1396,118 +1023,6 @@ memoIO op = do return (Map.insert k res mp, res) Just res -> return (mp, res) -unit_flags :: [Flag (CmdLineP [String])] -unit_flags = [defFlag "unit" (SepArg addUnit)] - -addUnit :: String -> EwM (CmdLineP [String]) () -addUnit unit_str = liftEwM $ do - units <- getCmdLineState - putCmdLineState (unit_str : units) - --- | Throws if package flags are unsatisfiable -setOptions :: GhcMonad m - => NormalizedFilePath - -> ComponentOptions - -> DynFlags - -> FilePath -- ^ root dir, see Note [Root Directory] - -> m (NonEmpty (DynFlags, [GHC.Target])) -setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do - ((theOpts',_errs,_warns),units) <- processCmdLineP unit_flags [] (map noLoc theOpts) - case NE.nonEmpty units of - Just us -> initMulti us - Nothing -> do - (df, targets) <- initOne (map unLoc theOpts') - -- A special target for the file which caused this wonderful - -- component to be created. In case the cradle doesn't list all the targets for - -- the component, in which case things will be horribly broken anyway. - -- - -- When we have a singleComponent that is caused to be loaded due to a - -- file, we assume the file is part of that component. This is useful - -- for bare GHC sessions, such as many of the ones used in the testsuite - -- - -- We don't do this when we have multiple components, because each - -- component better list all targets or there will be anarchy. - -- It is difficult to know which component to add our file to in - -- that case. - -- Multi unit arguments are likely to come from cabal, which - -- does list all targets. 
- -- - -- If we don't end up with a target for the current file in the end, then - -- we will report it as an error for that file - let abs_fp = toAbsolute rootDir (fromNormalizedFilePath cfp) - let special_target = Compat.mkSimpleTarget df abs_fp - pure $ (df, special_target : targets) :| [] - where - initMulti unitArgFiles = - forM unitArgFiles $ \f -> do - args <- liftIO $ expandResponse [f] - initOne args - initOne this_opts = do - (dflags', targets') <- addCmdOpts this_opts dflags - let dflags'' = - case unitIdString (homeUnitId_ dflags') of - -- cabal uses main for the unit id of all executable packages - -- This makes multi-component sessions confused about what - -- options to use for that component. - -- Solution: hash the options and use that as part of the unit id - -- This works because there won't be any dependencies on the - -- executable unit. - "main" -> - let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack this_opts) - hashed_uid = Compat.toUnitId (Compat.stringToUnit ("main-"++hash)) - in setHomeUnitId_ hashed_uid dflags' - _ -> dflags' - - let targets = makeTargetsAbsolute root targets' - root = case workingDirectory dflags'' of - Nothing -> compRoot - Just wdir -> compRoot wdir - let dflags''' = - setWorkingDirectory root $ - disableWarningsAsErrors $ - -- disabled, generated directly by ghcide instead - flip gopt_unset Opt_WriteInterface $ - -- disabled, generated directly by ghcide instead - -- also, it can confuse the interface stale check - dontWriteHieFiles $ - setIgnoreInterfacePragmas $ - setBytecodeLinkerOptions $ - disableOptimisation $ - Compat.setUpTypedHoles $ - makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory - dflags'' - return (dflags''', targets) - -setIgnoreInterfacePragmas :: DynFlags -> DynFlags -setIgnoreInterfacePragmas df = - gopt_set (gopt_set df Opt_IgnoreInterfacePragmas) Opt_IgnoreOptimChanges - -disableOptimisation :: DynFlags -> DynFlags -disableOptimisation df = updOptLevel 0 df - -setHiDir :: FilePath -> DynFlags -> DynFlags -setHiDir f d = - -- override user settings to avoid conflicts leading to recompilation - d { hiDir = Just f} - -setODir :: FilePath -> DynFlags -> DynFlags -setODir f d = - -- override user settings to avoid conflicts leading to recompilation - d { objectDir = Just f} - -getCacheDirsDefault :: String -> [String] -> IO CacheDirs -getCacheDirsDefault prefix opts = do - dir <- Just <$> getXdgDirectory XdgCache (cacheDir prefix ++ "-" ++ opts_hash) - return $ CacheDirs dir dir dir - where - -- Create a unique folder per set of different GHC options, assuming that each different set of - -- GHC options will create incompatible interface files. 
- opts_hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack opts) - --- | Sub directory for the cache path -cacheDir :: String -cacheDir = "ghcide" - ---------------------------------------------------------------------------------------------------- data PackageSetupException diff --git a/ghcide/session-loader/Development/IDE/Session/Dependency.hs b/ghcide/session-loader/Development/IDE/Session/Dependency.hs new file mode 100644 index 0000000000..926e0e47b3 --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/Dependency.hs @@ -0,0 +1,35 @@ +module Development.IDE.Session.Dependency where + +import Control.Exception.Safe as Safe +import Data.Either.Extra +import qualified Data.Map.Strict as Map +import Data.Time.Clock +import System.Directory + +type DependencyInfo = Map.Map FilePath (Maybe UTCTime) + +-- | Check if any dependency has been modified lately. +checkDependencyInfo :: DependencyInfo -> IO Bool +checkDependencyInfo old_di = do + di <- getDependencyInfo (Map.keys old_di) + return (di == old_di) + +-- Note [Multi Cradle Dependency Info] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- Why do we implement our own file modification tracking here? +-- The primary reason is that the custom caching logic is quite complicated and going into shake +-- adds even more complexity and more indirection. I did try for about 5 hours to work out how to +-- use shake rules rather than IO but eventually gave up. + +-- | Computes a mapping from a filepath to its latest modification date. +-- See Note [Multi Cradle Dependency Info] why we do this ourselves instead +-- of letting shake take care of it. +getDependencyInfo :: [FilePath] -> IO DependencyInfo +getDependencyInfo fs = Map.fromList <$> mapM do_one fs + + where + safeTryIO :: IO a -> IO (Either IOException a) + safeTryIO = Safe.try + + do_one :: FilePath -> IO (FilePath, Maybe UTCTime) + do_one fp = (fp,) . 
eitherToMaybe <$> safeTryIO (getModificationTime fp) diff --git a/ghcide/session-loader/Development/IDE/Session/Ghc.hs b/ghcide/session-loader/Development/IDE/Session/Ghc.hs new file mode 100644 index 0000000000..25f377200c --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/Ghc.hs @@ -0,0 +1,522 @@ +{-# LANGUAGE CPP #-} +module Development.IDE.Session.Ghc where + +import Control.Monad +import Control.Monad.Extra as Extra +import Control.Monad.IO.Class +import qualified Crypto.Hash.SHA1 as H +import qualified Data.ByteString.Base16 as B16 +import qualified Data.ByteString.Char8 as B +import Data.Function +import Data.List +import Data.List.Extra as L +import Data.List.NonEmpty (NonEmpty (..)) +import qualified Data.List.NonEmpty as NE +import qualified Data.Map.Strict as Map +import Data.Maybe +import qualified Data.Text as T +import Development.IDE.Core.Shake hiding (Log, knownTargets, + withHieDb) +import qualified Development.IDE.GHC.Compat as Compat +import Development.IDE.GHC.Compat.CmdLine +import Development.IDE.GHC.Compat.Core hiding (Target, TargetFile, + TargetModule, Var, + Warning, getOptions) +import qualified Development.IDE.GHC.Compat.Core as GHC +import Development.IDE.GHC.Compat.Env hiding (Logger) +import Development.IDE.GHC.Compat.Units (UnitId) +import Development.IDE.GHC.Util +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq) +import Development.IDE.Types.Location +import GHC.ResponseFile +import HIE.Bios.Environment hiding (getCacheDir) +import HIE.Bios.Types hiding (Log) +import Ide.Logger (Pretty (pretty), + Priority (Debug, Error, Info), + Recorder, WithPriority, logWith, viaShow, (<+>)) +import System.Directory +import System.FilePath +import System.Info + + +import Control.DeepSeq +import Control.Exception (evaluate) +import Control.Monad.IO.Unlift (MonadUnliftIO) +import qualified Data.Set as OS +import qualified Development.IDE.GHC.Compat.Util as Compat +import Development.IDE.Session.Dependency +import GHC.Data.Graph.Directed +import Ide.PluginUtils (toAbsolute) + +import GHC.Driver.Env (hsc_all_home_unit_ids) +import GHC.Driver.Errors.Types +import GHC.Types.Error (errMsgDiagnostic, + singleMessage) +import GHC.Unit.State + +data Log + = LogInterfaceFilesCacheDir !FilePath + | LogMakingNewHscEnv ![UnitId] + | LogNewComponentCache !(([FileDiagnostic], Maybe HscEnvEq), DependencyInfo) + | LogDLLLoadError !String +deriving instance Show Log + +instance Pretty Log where + pretty = \case + LogInterfaceFilesCacheDir path -> + "Interface files cache directory:" <+> pretty path + LogMakingNewHscEnv inPlaceUnitIds -> + "Making new HscEnv. In-place unit ids:" <+> pretty (map show inPlaceUnitIds) + LogNewComponentCache componentCache -> + "New component cache HscEnvEq:" <+> viaShow componentCache + LogDLLLoadError errorString -> + "Error dynamically loading libm.so.6:" <+> pretty errorString + +-- This is pristine information about a component +data RawComponentInfo = RawComponentInfo + { rawComponentUnitId :: UnitId + -- | Unprocessed DynFlags. Contains inplace packages such as libraries. + -- We do not want to use them unprocessed. + , rawComponentDynFlags :: DynFlags + -- | All targets of this components. + , rawComponentTargets :: [GHC.Target] + -- | Filepath which caused the creation of this component + , rawComponentFP :: NormalizedFilePath + -- | Component Options used to load the component. 
+ , rawComponentCOptions :: ComponentOptions + -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file + -- to last modification time. See Note [Multi Cradle Dependency Info]. + , rawComponentDependencyInfo :: DependencyInfo + } + +-- This is processed information about the component, in particular the dynflags will be modified. +data ComponentInfo = ComponentInfo + { componentUnitId :: UnitId + -- | Processed DynFlags. Does not contain inplace packages such as local + -- libraries. Can be used to actually load this Component. + , componentDynFlags :: DynFlags + -- | All targets of this components. + , componentTargets :: [GHC.Target] + -- | Filepath which caused the creation of this component + , componentFP :: NormalizedFilePath + -- | Component Options used to load the component. + , componentCOptions :: ComponentOptions + -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file + -- to last modification time. See Note [Multi Cradle Dependency Info] + , componentDependencyInfo :: DependencyInfo + } + + +unit_flags :: [Flag (CmdLineP [String])] +unit_flags = [defFlag "unit" (SepArg addUnit)] + +addUnit :: String -> EwM (CmdLineP [String]) () +addUnit unit_str = liftEwM $ do + units <- getCmdLineState + putCmdLineState (unit_str : units) + + +-- | Create a mapping from FilePaths to HscEnvEqs +-- This combines all the components we know about into +-- an appropriate session, which is a multi component +-- session on GHC 9.4+ +newComponentCache + :: Recorder (WithPriority Log) + -> [String] -- ^ File extensions to consider + -> NormalizedFilePath -- ^ Path to file that caused the creation of this component + -> HscEnv -- ^ An empty HscEnv + -> [ComponentInfo] -- ^ New components to be loaded + -> [ComponentInfo] -- ^ old, already existing components + -> IO [ [TargetDetails] ] +newComponentCache recorder exts _cfp hsc_env old_cis new_cis = do + let cis = Map.unionWith unionCIs (mkMap new_cis) (mkMap old_cis) + -- When we have multiple components with the same uid, + -- prefer the new one over the old. + -- However, we might have added some targets to the old unit + -- (see special target), so preserve those + unionCIs new_ci old_ci = new_ci { componentTargets = componentTargets new_ci ++ componentTargets old_ci } + mkMap = Map.fromListWith unionCIs . map (\ci -> (componentUnitId ci, ci)) + let dfs = map componentDynFlags $ Map.elems cis + uids = Map.keys cis + logWith recorder Info $ LogMakingNewHscEnv uids + hscEnv' <- -- Set up a multi component session with the other units on GHC 9.4 + Compat.initUnits dfs hsc_env + + let closure_errs = maybeToList $ checkHomeUnitsClosed' (hsc_unit_env hscEnv') (hsc_all_home_unit_ids hscEnv') + closure_err_to_multi_err err = + ideErrorWithSource + (Just "cradle") (Just DiagnosticSeverity_Warning) _cfp + (T.pack (Compat.printWithoutUniques (singleMessage err))) + (Just (fmap GhcDriverMessage err)) + multi_errs = map closure_err_to_multi_err closure_errs + bad_units = OS.fromList $ concat $ do + x <- map errMsgDiagnostic closure_errs + DriverHomePackagesNotClosed us <- pure x + pure us + isBad ci = (homeUnitId_ (componentDynFlags ci)) `OS.member` bad_units + -- Whenever we spin up a session on Linux, dynamically load libm.so.6 + -- in. We need this in case the binary is statically linked, in which + -- case the interactive session will fail when trying to load + -- ghc-prim, which happens whenever Template Haskell is being + -- evaluated or haskell-language-server's eval plugin tries to run + -- some code. 
If the binary is dynamically linked, then this will have + -- no effect. + -- See https://github.com/haskell/haskell-language-server/issues/221 + -- We need to do this after the call to setSessionDynFlags initialises + -- the loader + when (os == "linux") $ do + initObjLinker hscEnv' + res <- loadDLL hscEnv' "libm.so.6" + case res of + Nothing -> pure () + Just err -> logWith recorder Error $ LogDLLLoadError err + + forM (Map.elems cis) $ \ci -> do + let df = componentDynFlags ci + thisEnv <- do + -- In GHC 9.4 we have multi component support, and we have initialised all the units + -- above. + -- We just need to set the current unit here + pure $ hscSetActiveUnitId (homeUnitId_ df) hscEnv' + henv <- newHscEnvEq thisEnv + let targetEnv = (if isBad ci then multi_errs else [], Just henv) + targetDepends = componentDependencyInfo ci + logWith recorder Debug $ LogNewComponentCache (targetEnv, targetDepends) + evaluate $ liftRnf rwhnf $ componentTargets ci + + let mk t = fromTargetId (importPaths df) exts (targetId t) targetEnv targetDepends + ctargets <- concatMapM mk (componentTargets ci) + + return (L.nubOrdOn targetTarget ctargets) + +-- | Throws if package flags are unsatisfiable +setOptions :: GhcMonad m + => NormalizedFilePath + -> ComponentOptions + -> DynFlags + -> FilePath -- ^ root dir, see Note [Root Directory] + -> m (NonEmpty (DynFlags, [GHC.Target])) +setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do + ((theOpts',_errs,_warns),units) <- processCmdLineP unit_flags [] (map noLoc theOpts) + case NE.nonEmpty units of + Just us -> initMulti us + Nothing -> do + (df, targets) <- initOne (map unLoc theOpts') + -- A special target for the file which caused this wonderful + -- component to be created. In case the cradle doesn't list all the targets for + -- the component, in which case things will be horribly broken anyway. + -- + -- When we have a singleComponent that is caused to be loaded due to a + -- file, we assume the file is part of that component. This is useful + -- for bare GHC sessions, such as many of the ones used in the testsuite + -- + -- We don't do this when we have multiple components, because each + -- component better list all targets or there will be anarchy. + -- It is difficult to know which component to add our file to in + -- that case. + -- Multi unit arguments are likely to come from cabal, which + -- does list all targets. + -- + -- If we don't end up with a target for the current file in the end, then + -- we will report it as an error for that file + let abs_fp = toAbsolute rootDir (fromNormalizedFilePath cfp) + let special_target = Compat.mkSimpleTarget df abs_fp + pure $ (df, special_target : targets) :| [] + where + initMulti unitArgFiles = + forM unitArgFiles $ \f -> do + args <- liftIO $ expandResponse [f] + initOne args + initOne this_opts = do + (dflags', targets') <- addCmdOpts this_opts dflags + let dflags'' = + case unitIdString (homeUnitId_ dflags') of + -- cabal uses main for the unit id of all executable packages + -- This makes multi-component sessions confused about what + -- options to use for that component. + -- Solution: hash the options and use that as part of the unit id + -- This works because there won't be any dependencies on the + -- executable unit. 
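+                     -- (A rough illustration with a made-up flag set, not part of the
+                     -- hashing logic itself: an executable loaded with
+                     -- this_opts = ["-O0", "-package", "text"] ends up with a unit id
+                     -- of the form "main-<40 hex chars of the SHA1 of those options>"
+                     -- instead of the ambiguous "main".)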
+                     "main" ->
+                       let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack this_opts)
+                           hashed_uid = Compat.toUnitId (Compat.stringToUnit ("main-"++hash))
+                       in setHomeUnitId_ hashed_uid dflags'
+                     _ -> dflags'
+
+          let targets = makeTargetsAbsolute root targets'
+              root = case workingDirectory dflags'' of
+                Nothing   -> compRoot
+                Just wdir -> compRoot </> wdir
+          let dflags''' =
+                setWorkingDirectory root $
+                disableWarningsAsErrors $
+                -- disabled, generated directly by ghcide instead
+                flip gopt_unset Opt_WriteInterface $
+                -- disabled, generated directly by ghcide instead
+                -- also, it can confuse the interface stale check
+                dontWriteHieFiles $
+                setIgnoreInterfacePragmas $
+                setBytecodeLinkerOptions $
+                disableOptimisation $
+                Compat.setUpTypedHoles $
+                makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory
+                dflags''
+          return (dflags''', targets)
+
+addComponentInfo ::
+  MonadUnliftIO m =>
+  Recorder (WithPriority Log) ->
+  (String -> [String] -> IO CacheDirs) ->
+  DependencyInfo ->
+  NonEmpty (DynFlags, [GHC.Target]) ->
+  (Maybe FilePath, NormalizedFilePath, ComponentOptions) ->
+  Map.Map (Maybe FilePath) [RawComponentInfo] ->
+  m (Map.Map (Maybe FilePath) [RawComponentInfo], ([ComponentInfo], [ComponentInfo]))
+addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) m = do
+    -- Just deps if there's already an HscEnv
+    -- Nothing if it's the first time we are making an HscEnv
+    let oldDeps = Map.lookup hieYaml m
+    let -- Add the raw information about this component to the list
+        -- We will modify the unitId and DynFlags used for
+        -- compilation but these are the true source of
+        -- information.
+        new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newDynFlags
+        all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps
+    -- Get all the unit-ids for things in this component
+
+    all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do
+        let prefix = show rawComponentUnitId
+        -- See Note [Avoiding bad interface files]
+        let cacheDirOpts = componentOptions opts
+        cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts
+        processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags
+        -- The final component information, mostly the same but the DynFlags don't
+        -- contain any packages which are also loaded
+        -- into the same component.
+        pure $ ComponentInfo
+          { componentUnitId = rawComponentUnitId
+          , componentDynFlags = processed_df
+          , componentTargets = rawComponentTargets
+          , componentFP = rawComponentFP
+          , componentCOptions = rawComponentCOptions
+          , componentDependencyInfo = rawComponentDependencyInfo
+          }
+    -- Modify the map so the hieYaml now maps to the newly updated
+    -- ComponentInfos
+    -- Returns
+    -- . The information for the new component which caused this cache miss
+    -- . The modified information (without -inplace flags) for
+    --   existing packages
+    let (new,old) = NE.splitAt (NE.length new_deps) all_deps'
+    pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old))
+
+setIgnoreInterfacePragmas :: DynFlags -> DynFlags
+setIgnoreInterfacePragmas df =
+    gopt_set (gopt_set df Opt_IgnoreInterfacePragmas) Opt_IgnoreOptimChanges
+
+disableOptimisation :: DynFlags -> DynFlags
+disableOptimisation df = updOptLevel 0 df
+
+setHiDir :: FilePath -> DynFlags -> DynFlags
+setHiDir f d =
+    -- override user settings to avoid conflicts leading to recompilation
+    d { hiDir = Just f}
+
+setODir :: FilePath -> DynFlags -> DynFlags
+setODir f d =
+    -- override user settings to avoid conflicts leading to recompilation
+    d { objectDir = Just f}
+
+data CacheDirs = CacheDirs
+  { hiCacheDir, hieCacheDir, oCacheDir :: Maybe FilePath}
+
+{- Note [Avoiding bad interface files]
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Originally, we set the cache directory for the various components once
+on the first occurrence of the component.
+This works fine if these components have no references to each other,
+but if you have components that depend on each other, the interface files are
+updated for each component.
+After restarting the session and only opening the component that depended
+on the other, suddenly the interface files of this component are stale.
+However, from the point of view of `ghcide`, they do not look stale,
+thus they are not regenerated, and the IDE shows weird errors such as:
+```
+typecheckIface
+Declaration for Rep_ClientRunFlags
+Axiom branches Rep_ClientRunFlags:
+  Failed to load interface for ‘Distribution.Simple.Flag’
+  Use -v to see a list of the files searched for.
+```
+and
+```
+expectJust checkFamInstConsistency
+CallStack (from HasCallStack):
+  error, called at compiler\\utils\\Maybes.hs:55:27 in ghc:Maybes
+  expectJust, called at compiler\\typecheck\\FamInst.hs:461:30 in ghc:FamInst
+```
+
+To mitigate this, we set the cache directory for each component dependent
+on the components of the current `HscEnv`, in addition to the component options
+of the respective components.
+Assume two components, c1, c2, where c2 depends on c1, and the options of the
+respective components are co1, co2.
+If we want to load component c2, followed by c1, we set the cache directory for
+each component in this way:
+
+  * Load component c2
+    * (Cache Directory State)
+        - name of c2 + co2
+  * Load component c1
+    * (Cache Directory State)
+        - name of c2 + name of c1 + co2
+        - name of c2 + name of c1 + co1
+
+Overall, we created three cache directories. If we opened c1 first, then we
+create a fourth cache directory.
+This makes sure that interface files are always correctly updated.
+
+Since this causes a lot of recompilation, we only update the cache-directory
+if the dependencies of a component have really changed.
+E.g. when you load two executables, they cannot depend on each other. They
+should be filtered out, so that we don't have to re-compile everything.
+-}
+
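+-- A rough illustration of the Note above, assuming the default
+-- 'getCacheDirsDefault' defined below: a component with unit id @main-abc@
+-- (a made-up name) loaded with options @opts@ keeps its interface, hie and
+-- object files under something like
+-- @$XDG_CACHE_HOME/ghcide/main-abc-<SHA1 of opts>@, so loading the same
+-- component with a different set of options can never overwrite those
+-- interface files.
+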
+-- | Set the cache-directory based on the ComponentOptions and a list of
+-- internal packages.
+-- For the exact reason, see Note [Avoiding bad interface files].
+setCacheDirs :: MonadUnliftIO m => Recorder (WithPriority Log) -> CacheDirs -> DynFlags -> m DynFlags
+setCacheDirs recorder CacheDirs{..} dflags = do
+    logWith recorder Info $ LogInterfaceFilesCacheDir (fromMaybe cacheDir hiCacheDir)
+    pure $ dflags
+          & maybe id setHiDir hiCacheDir
+          & maybe id setHieDir hieCacheDir
+          & maybe id setODir oCacheDir
+
+getCacheDirsDefault :: String -> [String] -> IO CacheDirs
+getCacheDirsDefault prefix opts = do
+    dir <- Just <$> getXdgDirectory XdgCache (cacheDir </> prefix ++ "-" ++ opts_hash)
+    return $ CacheDirs dir dir dir
+    where
+        -- Create a unique folder per set of different GHC options, assuming that each different set of
+        -- GHC options will create incompatible interface files.
+        opts_hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack opts)
+
+setNameCache :: NameCache -> HscEnv -> HscEnv
+setNameCache nc hsc = hsc { hsc_NC = nc }
+
+-- | Sub directory for the cache path
+cacheDir :: String
+cacheDir = "ghcide"
+
+emptyHscEnv :: NameCache -> FilePath -> IO HscEnv
+emptyHscEnv nc libDir = do
+    -- We call setSessionDynFlags so that the loader is initialised
+    -- We need to do this before we call initUnits.
+    env <- liftIO $ runGhc (Just libDir) $
+      getSessionDynFlags >>= setSessionDynFlags >> getSession
+    pure $ setNameCache nc (hscSetFlags ((hsc_dflags env){useUnicode = True }) env)
+
+-- ----------------------------------------------------------------------------
+-- Target Details
+-- ----------------------------------------------------------------------------
+
+data TargetDetails = TargetDetails
+  {
+    targetTarget    :: !Target,
+    targetEnv       :: !(IdeResult HscEnvEq),
+    targetDepends   :: !DependencyInfo,
+    targetLocations :: ![NormalizedFilePath]
+  }
+
+fromTargetId :: [FilePath]          -- ^ import paths
+             -> [String]            -- ^ extensions to consider
+             -> TargetId
+             -> IdeResult HscEnvEq
+             -> DependencyInfo
+             -> IO [TargetDetails]
+-- For a target module we consider all the import paths
+fromTargetId is exts (GHC.TargetModule modName) env dep = do
+    let fps = [i </> moduleNameSlashes modName -<.> ext <> boot
+              | ext <- exts
+              , i <- is
+              , boot <- ["", "-boot"]
+              ]
+    let locs = fmap toNormalizedFilePath' fps
+    return [TargetDetails (TargetModule modName) env dep locs]
+-- For a 'TargetFile' we consider all the possible module names
+fromTargetId _ _ (GHC.TargetFile f _) env deps = do
+    let nf = toNormalizedFilePath' f
+    let other
+          | "-boot" `isSuffixOf` f = toNormalizedFilePath' (L.dropEnd 5 $ fromNormalizedFilePath nf)
+          | otherwise = toNormalizedFilePath' (fromNormalizedFilePath nf ++ "-boot")
+    return [TargetDetails (TargetFile nf) env deps [nf, other]]
+
+-- ----------------------------------------------------------------------------
+-- Backwards compatibility
+-- ----------------------------------------------------------------------------
+
+#if MIN_VERSION_ghc(9,13,0)
+-- Moved back to implementation in GHC.
+checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> [DriverMessages]
+checkHomeUnitsClosed' ue _ = checkHomeUnitsClosed ue
+#else
+-- This function checks the important property that if both p and q are home units
+-- then any dependency of p, which transitively depends on q is also a home unit.
+-- GHC had an implementation of this function, but it was horribly inefficient +-- We should move back to the GHC implementation on compilers where +-- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/12162 is included +checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> Maybe (Compat.MsgEnvelope DriverMessage) +checkHomeUnitsClosed' ue home_id_set + | OS.null bad_unit_ids = Nothing + | otherwise = Just (GHC.mkPlainErrorMsgEnvelope rootLoc $ DriverHomePackagesNotClosed (OS.toList bad_unit_ids)) + where + bad_unit_ids = upwards_closure OS.\\ home_id_set + rootLoc = mkGeneralSrcSpan (Compat.fsLit "") + + graph :: Graph (Node UnitId UnitId) + graph = graphFromEdgedVerticesUniq graphNodes + + -- downwards closure of graph + downwards_closure + = graphFromEdgedVerticesUniq [ DigraphNode uid uid (OS.toList deps) + | (uid, deps) <- Map.toList (allReachable graph node_key)] + + inverse_closure = transposeG downwards_closure + + upwards_closure = OS.fromList $ map node_key $ reachablesG inverse_closure [DigraphNode uid uid [] | uid <- OS.toList home_id_set] + + all_unit_direct_deps :: UniqMap UnitId (OS.Set UnitId) + all_unit_direct_deps + = unitEnv_foldWithKey go emptyUniqMap $ ue_home_unit_graph ue + where + go rest this this_uis = + plusUniqMap_C OS.union + (addToUniqMap_C OS.union external_depends this (OS.fromList this_deps)) + rest + where + external_depends = mapUniqMap (OS.fromList . unitDepends) +#if !MIN_VERSION_ghc(9,7,0) + $ listToUniqMap $ Map.toList +#endif + + $ unitInfoMap this_units + this_units = homeUnitEnv_units this_uis + this_deps = [ Compat.toUnitId unit | (unit,Just _) <- explicitUnits this_units] + + graphNodes :: [Node UnitId UnitId] + graphNodes = go OS.empty home_id_set + where + go done todo + = case OS.minView todo of + Nothing -> [] + Just (uid, todo') + | OS.member uid done -> go done todo' + | otherwise -> case lookupUniqMap all_unit_direct_deps uid of + Nothing -> pprPanic "uid not found" (Compat.ppr (uid, all_unit_direct_deps)) + Just depends -> + let todo'' = (depends OS.\\ done) `OS.union` todo' + in DigraphNode uid uid (OS.toList depends) : go (OS.insert uid done) todo'' +#endif From 7d106cff15a80b38897710ce0ef04a8ba735169b Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sat, 19 Jul 2025 02:22:49 +0800 Subject: [PATCH 56/87] fix duplication --- .../session-loader/Development/IDE/Session.hs | 133 ++---------------- 1 file changed, 9 insertions(+), 124 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 5e9a710893..ca4cafcd4d 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -91,9 +91,9 @@ import qualified Data.HashSet as Set import Database.SQLite.Simple import Development.IDE.Core.Tracing (withTrace) import Development.IDE.Core.WorkerThread (withWorkerQueue) -import Development.IDE.Session.Diagnostics (renderCradleError) import Development.IDE.Session.Dependency -import Development.IDE.Session.Ghc hiding (Log) +import Development.IDE.Session.Diagnostics (renderCradleError) +import Development.IDE.Session.Ghc hiding (Log) import Development.IDE.Types.Shake (WithHieDb, WithHieDbShield (..), toNoFileKey) @@ -106,11 +106,11 @@ import Text.ParserCombinators.ReadP (readP_to_S) import Control.Concurrent.STM (STM, TVar) import qualified Control.Monad.STM as STM +import Control.Monad.Trans.Reader +import qualified Development.IDE.Session.Ghc as Ghc import qualified Development.IDE.Session.OrderedSet as S import 
qualified Focus import qualified StmContainers.Map as STM -import Control.Monad.Trans.Reader -import qualified Development.IDE.Session.Ghc as Ghc data Log = LogSettingInitialDynFlags @@ -689,12 +689,12 @@ data SessionShake = SessionShake } data SessionEnv = SessionEnv - { sessionLspContext :: Maybe (LanguageContextEnv Config) - , sessionRootDir :: FilePath - , sessionIdeOptions :: IdeOptions - , sessionClientConfig :: Config + { sessionLspContext :: Maybe (LanguageContextEnv Config) + , sessionRootDir :: FilePath + , sessionIdeOptions :: IdeOptions + , sessionClientConfig :: Config , sessionSharedNameCache :: NameCache - , sessionLoadingOptions :: SessionLoadingOptions + , sessionLoadingOptions :: SessionLoadingOptions } type SessionM = ReaderT SessionEnv IO @@ -1024,121 +1024,6 @@ memoIO op = do return (Map.insert k res mp, res) Just res -> return (mp, res) -unit_flags :: [Flag (CmdLineP [String])] -unit_flags = [defFlag "unit" (SepArg addUnit)] - -addUnit :: String -> EwM (CmdLineP [String]) () -addUnit unit_str = liftEwM $ do - units <- getCmdLineState - putCmdLineState (unit_str : units) - --- | Throws if package flags are unsatisfiable -setOptions :: GhcMonad m - => NormalizedFilePath - -> ComponentOptions - -> DynFlags - -> FilePath -- ^ root dir, see Note [Root Directory] - -> m (NonEmpty (DynFlags, [GHC.Target])) -setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do - ((theOpts',_errs,_warns),units) <- processCmdLineP unit_flags [] (map noLoc theOpts) - case NE.nonEmpty units of - Just us -> initMulti us - Nothing -> do - (df, targets) <- initOne (map unLoc theOpts') - -- A special target for the file which caused this wonderful - -- component to be created. In case the cradle doesn't list all the targets for - -- the component, in which case things will be horribly broken anyway. - -- - -- When we have a singleComponent that is caused to be loaded due to a - -- file, we assume the file is part of that component. This is useful - -- for bare GHC sessions, such as many of the ones used in the testsuite - -- - -- We don't do this when we have multiple components, because each - -- component better list all targets or there will be anarchy. - -- It is difficult to know which component to add our file to in - -- that case. - -- Multi unit arguments are likely to come from cabal, which - -- does list all targets. - -- - -- If we don't end up with a target for the current file in the end, then - -- we will report it as an error for that file - let abs_fp = toAbsolute rootDir (fromNormalizedFilePath cfp) - let special_target = Compat.mkSimpleTarget df abs_fp - pure $ (df, special_target : targets) :| [] - where - initMulti unitArgFiles = - forM unitArgFiles $ \f -> do - args <- liftIO $ expandResponse [f] - -- The reponse files may contain arguments like "+RTS", - -- and hie-bios doesn't expand the response files of @-unit@ arguments. - -- Thus, we need to do the stripping here. - initOne $ HieBios.removeRTS $ HieBios.removeVerbosityOpts args - initOne this_opts = do - (dflags', targets') <- addCmdOpts this_opts dflags - let dflags'' = - case unitIdString (homeUnitId_ dflags') of - -- cabal uses main for the unit id of all executable packages - -- This makes multi-component sessions confused about what - -- options to use for that component. - -- Solution: hash the options and use that as part of the unit id - -- This works because there won't be any dependencies on the - -- executable unit. 
- "main" -> - let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack this_opts) - hashed_uid = Compat.toUnitId (Compat.stringToUnit ("main-"++hash)) - in setHomeUnitId_ hashed_uid dflags' - _ -> dflags' - - let targets = makeTargetsAbsolute root targets' - root = case workingDirectory dflags'' of - Nothing -> compRoot - Just wdir -> compRoot wdir - let dflags''' = - setWorkingDirectory root $ - disableWarningsAsErrors $ - -- disabled, generated directly by ghcide instead - flip gopt_unset Opt_WriteInterface $ - -- disabled, generated directly by ghcide instead - -- also, it can confuse the interface stale check - dontWriteHieFiles $ - setIgnoreInterfacePragmas $ - setBytecodeLinkerOptions $ - disableOptimisation $ - Compat.setUpTypedHoles $ - makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory - dflags'' - return (dflags''', targets) - -setIgnoreInterfacePragmas :: DynFlags -> DynFlags -setIgnoreInterfacePragmas df = - gopt_set (gopt_set df Opt_IgnoreInterfacePragmas) Opt_IgnoreOptimChanges - -disableOptimisation :: DynFlags -> DynFlags -disableOptimisation df = updOptLevel 0 df - -setHiDir :: FilePath -> DynFlags -> DynFlags -setHiDir f d = - -- override user settings to avoid conflicts leading to recompilation - d { hiDir = Just f} - -setODir :: FilePath -> DynFlags -> DynFlags -setODir f d = - -- override user settings to avoid conflicts leading to recompilation - d { objectDir = Just f} - -getCacheDirsDefault :: String -> [String] -> IO CacheDirs -getCacheDirsDefault prefix opts = do - dir <- Just <$> getXdgDirectory XdgCache (cacheDir prefix ++ "-" ++ opts_hash) - return $ CacheDirs dir dir dir - where - -- Create a unique folder per set of different GHC options, assuming that each different set of - -- GHC options will create incompatible interface files. 
- opts_hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack opts) - --- | Sub directory for the cache path -cacheDir :: String -cacheDir = "ghcide" - ---------------------------------------------------------------------------------------------------- data PackageSetupException From ddef7d4f2a1d6c6d82aab9144139759a67ea8e1f Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 15:11:59 +0800 Subject: [PATCH 57/87] Remove RTS options from multi-unit configuration files --- ghcide-test/data/multi-unit/a-1.0.0-inplace | 3 --- ghcide-test/data/multi-unit/c-1.0.0-inplace | 2 -- 2 files changed, 5 deletions(-) diff --git a/ghcide-test/data/multi-unit/a-1.0.0-inplace b/ghcide-test/data/multi-unit/a-1.0.0-inplace index cab2b716ff..a54ea9bc4b 100644 --- a/ghcide-test/data/multi-unit/a-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/a-1.0.0-inplace @@ -16,6 +16,3 @@ base text -XHaskell98 A -+RTS --A32M --RTS diff --git a/ghcide-test/data/multi-unit/c-1.0.0-inplace b/ghcide-test/data/multi-unit/c-1.0.0-inplace index 7421d59279..7201a40de4 100644 --- a/ghcide-test/data/multi-unit/c-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/c-1.0.0-inplace @@ -17,5 +17,3 @@ a-1.0.0-inplace base -XHaskell98 C -+RTS --A32M From d5632d695f05637bb9c4a88907995240922cb4f3 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 15:53:17 +0800 Subject: [PATCH 58/87] format --- cabal.project | 2 -- ghcide/session-loader/Development/IDE/Session/Dependency.hs | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/cabal.project b/cabal.project index e4b31e93f5..fed144eb90 100644 --- a/cabal.project +++ b/cabal.project @@ -51,10 +51,8 @@ constraints: allow-newer: cabal-install-parsers:Cabal-syntax, - if impl(ghc >= 9.11) benchmarks: False allow-newer: cabal-install-parsers:base, cabal-install-parsers:time, - diff --git a/ghcide/session-loader/Development/IDE/Session/Dependency.hs b/ghcide/session-loader/Development/IDE/Session/Dependency.hs index 926e0e47b3..deedf809b8 100644 --- a/ghcide/session-loader/Development/IDE/Session/Dependency.hs +++ b/ghcide/session-loader/Development/IDE/Session/Dependency.hs @@ -1,8 +1,8 @@ module Development.IDE.Session.Dependency where -import Control.Exception.Safe as Safe +import Control.Exception.Safe as Safe import Data.Either.Extra -import qualified Data.Map.Strict as Map +import qualified Data.Map.Strict as Map import Data.Time.Clock import System.Directory From 4c5bc35eb53f3b4436e5670a302fab1a372c9e8b Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 17:51:15 +0800 Subject: [PATCH 59/87] apply the patch from https://github.com/haskell/haskell-language-server/commit/c3b61feccbc87857390b9fdb542ce0b3a701d074 --- ghcide-test/data/multi-unit/a-1.0.0-inplace | 3 +++ ghcide-test/data/multi-unit/c-1.0.0-inplace | 2 ++ ghcide/session-loader/Development/IDE/Session/Ghc.hs | 5 ++++- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/ghcide-test/data/multi-unit/a-1.0.0-inplace b/ghcide-test/data/multi-unit/a-1.0.0-inplace index a54ea9bc4b..cab2b716ff 100644 --- a/ghcide-test/data/multi-unit/a-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/a-1.0.0-inplace @@ -16,3 +16,6 @@ base text -XHaskell98 A ++RTS +-A32M +-RTS diff --git a/ghcide-test/data/multi-unit/c-1.0.0-inplace b/ghcide-test/data/multi-unit/c-1.0.0-inplace index 7201a40de4..7421d59279 100644 --- a/ghcide-test/data/multi-unit/c-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/c-1.0.0-inplace @@ -17,3 +17,5 @@ a-1.0.0-inplace base -XHaskell98 C ++RTS +-A32M diff --git 
a/ghcide/session-loader/Development/IDE/Session/Ghc.hs b/ghcide/session-loader/Development/IDE/Session/Ghc.hs index 88157bd990..ab1e5b7977 100644 --- a/ghcide/session-loader/Development/IDE/Session/Ghc.hs +++ b/ghcide/session-loader/Development/IDE/Session/Ghc.hs @@ -230,7 +230,10 @@ setOptions haddockOpt cfp (ComponentOptions theOpts compRoot _) dflags rootDir = initMulti unitArgFiles = forM unitArgFiles $ \f -> do args <- liftIO $ expandResponse [f] - initOne args + -- The reponse files may contain arguments like "+RTS", + -- and hie-bios doesn't expand the response files of @-unit@ arguments. + -- Thus, we need to do the stripping here. + initOne $ HieBios.removeRTS $ HieBios.removeVerbosityOpts args initOne this_opts = do (dflags', targets') <- addCmdOpts this_opts dflags let dflags'' = From d943452c92ece39076d6af94f1d256b650138417 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 18:16:39 +0800 Subject: [PATCH 60/87] add comments for orderedSet --- .../session-loader/Development/IDE/Session.hs | 2 +- .../Development/IDE/Session/OrderedSet.hs | 20 ++++++++++++++++--- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 02a0a13763..6feb6325f2 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -509,7 +509,7 @@ incrementVersion state = modifyVar' (version state) succ -- | Get files from the pending file set getPendingFiles :: SessionState -> IO (HashSet FilePath) -getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFiles state) +getPendingFiles state = atomically $ S.toHashSet (pendingFiles state) -- | Handle errors during session loading by recording file as having error and removing from pending handleSingleFileProcessingError' :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> SessionM () diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index a2b0a76565..250d6fa086 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -5,6 +5,7 @@ import Control.Concurrent.STM (STM, TQueue, flushTQueue, import Control.Concurrent.STM.TQueue (readTQueue, writeTQueue) import Control.Monad (when) import Data.Hashable (Hashable) +import qualified Data.HashSet import qualified Focus import qualified ListT as LT import qualified StmContainers.Set as S @@ -13,6 +14,14 @@ import StmContainers.Set (Set) type OrderedSet a = (TQueue a, Set a) +-- | Insert an element into the ordered set. +-- If the element is not already present, it is added to both the queue and set. +-- If the element already exists, it is moved to the end of the queue to maintain +-- most-recently-inserted ordering semantics. +-- It take O(n), not very good. + +-- Alternative: could preserve original position. +-- I am not sure which one is better. insert :: Hashable a => a -> OrderedSet a -> STM () insert a (que, s) = do (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s @@ -23,7 +32,6 @@ insert a (que, s) = do mapM_ (writeTQueue que) items return () writeTQueue que a - -- when que $ writeTQueue que a newIO :: Hashable a => IO (OrderedSet a) newIO = do @@ -31,6 +39,9 @@ newIO = do s <- S.newIO return (que, s) +-- | Read the first element from the queue. 
+-- If an element is not in the set, it means it has been deleted, +-- so we retry until we find a valid element that exists in the set. readQueue :: Hashable a => OrderedSet a -> STM a readQueue rs@(que, s) = do f <- readTQueue que @@ -41,8 +52,11 @@ readQueue rs@(que, s) = do lookup :: Hashable a => a -> OrderedSet a -> STM Bool lookup a (_, s) = S.lookup a s +-- | Delete an element from the set. +-- The queue is not modified directly; stale entries are filtered out lazily +-- during reading operations (see 'readQueue'). delete :: Hashable a => a -> OrderedSet a -> STM () delete a (_, s) = S.delete a s -toUnOrderedList :: Hashable a => OrderedSet a -> STM [a] -toUnOrderedList (_, s) = LT.toList $ S.listT s +toHashSet :: Hashable a => OrderedSet a -> Data.HashSet a +toHashSet (_, s) = TreeSet.fromList $ LT.toList $ S.listT s From bd79156a01920b2ba30b5b40b11a96f86762f993 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 18:18:15 +0800 Subject: [PATCH 61/87] update comments for orderedSet --- ghcide/session-loader/Development/IDE/Session/OrderedSet.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index 250d6fa086..33d64b53d8 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -20,7 +20,7 @@ type OrderedSet a = (TQueue a, Set a) -- most-recently-inserted ordering semantics. -- It take O(n), not very good. --- Alternative: could preserve original position. +-- Alternative: preserve original position and ignore new one. -- I am not sure which one is better. insert :: Hashable a => a -> OrderedSet a -> STM () insert a (que, s) = do From dc34df6d38751038fc1c35ccb5dcaabfad4b90b4 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 18:38:57 +0800 Subject: [PATCH 62/87] fix import --- .../Development/IDE/Session/Ghc.hs | 1 + .../Development/IDE/Session/OrderedSet.hs | 18 ++++-------------- 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session/Ghc.hs b/ghcide/session-loader/Development/IDE/Session/Ghc.hs index ab1e5b7977..7a84263ec9 100644 --- a/ghcide/session-loader/Development/IDE/Session/Ghc.hs +++ b/ghcide/session-loader/Development/IDE/Session/Ghc.hs @@ -30,6 +30,7 @@ import Development.IDE.Types.Diagnostics import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq) import Development.IDE.Types.Location import GHC.ResponseFile +import qualified HIE.Bios.Cradle.Utils as HieBios import HIE.Bios.Environment hiding (getCacheDir) import HIE.Bios.Types hiding (Log) import Ide.Logger (Pretty (pretty), diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index 33d64b53d8..a66e89f84d 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -16,22 +16,12 @@ type OrderedSet a = (TQueue a, Set a) -- | Insert an element into the ordered set. -- If the element is not already present, it is added to both the queue and set. --- If the element already exists, it is moved to the end of the queue to maintain --- most-recently-inserted ordering semantics. --- It take O(n), not very good. - --- Alternative: preserve original position and ignore new one. --- I am not sure which one is better. 
+-- If the element already exists, ignore it insert :: Hashable a => a -> OrderedSet a -> STM () insert a (que, s) = do (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s -- if already in the set - -- update the position of the element in the queue - when (not inserted) $ do - items <- filter (==a) <$> flushTQueue que - mapM_ (writeTQueue que) items - return () - writeTQueue que a + when inserted $ writeTQueue que a newIO :: Hashable a => IO (OrderedSet a) newIO = do @@ -58,5 +48,5 @@ lookup a (_, s) = S.lookup a s delete :: Hashable a => a -> OrderedSet a -> STM () delete a (_, s) = S.delete a s -toHashSet :: Hashable a => OrderedSet a -> Data.HashSet a -toHashSet (_, s) = TreeSet.fromList $ LT.toList $ S.listT s +toHashSet :: Hashable a => OrderedSet a -> STM (Data.HashSet.HashSet a) +toHashSet (_, s) = Data.HashSet.fromList <$> LT.toList (S.listT s) From 71d65754ed8ff942c55b9d833985990949553eb1 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Tue, 5 Aug 2025 17:17:15 +0800 Subject: [PATCH 63/87] refactor OrderedSet to use a record for better clarity and structure --- .../session-loader/Development/IDE/Session.hs | 1 - .../Development/IDE/Session/OrderedSet.hs | 20 ++++++++++--------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 6feb6325f2..5804ead632 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -56,7 +56,6 @@ import Development.IDE.Types.HscEnvEq (HscEnvEq) import Development.IDE.Types.Location import Development.IDE.Types.Options import qualified HIE.Bios as HieBios -import qualified HIE.Bios.Cradle.Utils as HieBios import HIE.Bios.Environment hiding (getCacheDir) import HIE.Bios.Types hiding (Log) import qualified HIE.Bios.Types as HieBios diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index a66e89f84d..630f1dc4fc 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -1,7 +1,6 @@ module Development.IDE.Session.OrderedSet where -import Control.Concurrent.STM (STM, TQueue, flushTQueue, - newTQueueIO) +import Control.Concurrent.STM (STM, TQueue, newTQueueIO) import Control.Concurrent.STM.TQueue (readTQueue, writeTQueue) import Control.Monad (when) import Data.Hashable (Hashable) @@ -12,13 +11,16 @@ import qualified StmContainers.Set as S import StmContainers.Set (Set) -type OrderedSet a = (TQueue a, Set a) +data OrderedSet a = OrderedSet + { insertionOrder :: TQueue a + , elements :: Set a + } -- | Insert an element into the ordered set. -- If the element is not already present, it is added to both the queue and set. -- If the element already exists, ignore it insert :: Hashable a => a -> OrderedSet a -> STM () -insert a (que, s) = do +insert a (OrderedSet que s) = do (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s -- if already in the set when inserted $ writeTQueue que a @@ -27,26 +29,26 @@ newIO :: Hashable a => IO (OrderedSet a) newIO = do que <- newTQueueIO s <- S.newIO - return (que, s) + return (OrderedSet que s) -- | Read the first element from the queue. -- If an element is not in the set, it means it has been deleted, -- so we retry until we find a valid element that exists in the set. 
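--
-- A small, hypothetical usage sketch of the lazy-deletion behaviour: a deleted
-- element is still physically in the queue, but 'readQueue' skips it because it
-- is no longer in the set.
--
-- >>> os <- newIO
-- >>> atomically $ insert "A.hs" os >> insert "B.hs" os >> delete "A.hs" os
-- >>> atomically $ readQueue os
-- "B.hs"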
readQueue :: Hashable a => OrderedSet a -> STM a -readQueue rs@(que, s) = do +readQueue rs@(OrderedSet que s) = do f <- readTQueue que b <- S.lookup f s -- retry if no files are left in the queue if b then return f else readQueue rs lookup :: Hashable a => a -> OrderedSet a -> STM Bool -lookup a (_, s) = S.lookup a s +lookup a (OrderedSet _ s) = S.lookup a s -- | Delete an element from the set. -- The queue is not modified directly; stale entries are filtered out lazily -- during reading operations (see 'readQueue'). delete :: Hashable a => a -> OrderedSet a -> STM () -delete a (_, s) = S.delete a s +delete a (OrderedSet _ s) = S.delete a s toHashSet :: Hashable a => OrderedSet a -> STM (Data.HashSet.HashSet a) -toHashSet (_, s) = Data.HashSet.fromList <$> LT.toList (S.listT s) +toHashSet (OrderedSet _ s) = Data.HashSet.fromList <$> LT.toList (S.listT s) From dd6b562976f7998200dd143939b844143417035c Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 11 Aug 2025 21:22:24 +0800 Subject: [PATCH 64/87] merge master in --- ghcide/session-loader/Development/IDE/Session.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 5804ead632..b1f0b63d21 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -832,7 +832,7 @@ packageSetup recorder sessionState newEmptyHscEnv (hieYaml, cfp, opts) = do hscEnv <- newEmptyHscEnv newTargetDfs <- liftIO $ evalGhcEnv hscEnv $ setOptions haddockparse cfp opts (hsc_dflags hscEnv) rootDir let deps = componentDependencies opts ++ maybeToList hieYaml - dep_info <- liftIO $ getDependencyInfo deps + dep_info <- liftIO $ getDependencyInfo (fmap (toAbsolute rootDir) deps) -- Now lookup to see whether we are combining with an existing HscEnv -- or making a new one. The lookup returns the HscEnv and a list of -- information about other components loaded into the HscEnv From 24738d61deddc0853f89b9fab23eb1e03a7b5460 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Wed, 20 Aug 2025 00:43:14 +0800 Subject: [PATCH 65/87] session: move handleBatchLoadSuccess inside restartSession to avoid stale GhcSession results and lost diagnostics --- .../session-loader/Development/IDE/Session.hs | 48 +++++++++---------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index b1f0b63d21..678acb13f4 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -817,10 +817,30 @@ session recorder sessionShake sessionState knownTargetsVar(hieYaml, cfp, opts, l let new_cache = newComponentCache (cmapWithPrio LogSessionGhc recorder) (optExtensions ideOptions) cfp hscEnv all_target_details <- liftIO $ new_cache old_deps new_deps (all_targets, this_flags_map) <- liftIO $ addErrorTargetIfUnknown all_target_details hieYaml cfp - - liftIO $ handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets - -- Typecheck all files in the project on startup - liftIO $ loadKnownTargets recorder sessionShake (optCheckProject ideOptions) knownTargetsVar new_deps all_targets + -- The VFS doesn't change on cradle edits, re-use the old one. 
+ -- Invalidate all the existing GhcSession build nodes by restarting the Shake session + liftIO $ do + checkProject <- optCheckProject ideOptions + restartSession sessionShake VFSUnmodified "new component" [] $ do + -- It is necessary to call handleBatchLoadSuccess in restartSession + -- to ensure the GhcSession rule does not return before a new session is started. + -- Otherwise, invalid compilation results may propagate to downstream rules, + -- potentially resulting in lost diagnostics and other issues. + handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets + keys2 <- invalidateCache sessionShake + keys1 <- extendKnownTargets recorder knownTargetsVar all_targets + -- Typecheck all files in the project on startup + unless (null new_deps || not checkProject) $ do + cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets) + void $ enqueueActions sessionShake $ mkDelayedAction "InitialLoad" Debug $ void $ do + mmt <- uses GetModificationTime cfps' + let cs_exist = catMaybes (zipWith (<$) cfps' mmt) + modIfaces <- uses GetModIface cs_exist + -- update exports map + shakeExtras <- getShakeExtras + let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces + liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) + return [keys1, keys2] -- | Create a new HscEnv from a hieYaml root and a set of options packageSetup :: Recorder (WithPriority Log) -> SessionState -> SessionM HscEnv -> (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> SessionM ([ComponentInfo], [ComponentInfo]) @@ -897,26 +917,6 @@ extendKnownTargets recorder knownTargetsVar newTargets = do logWith recorder Debug $ LogKnownFilesUpdated (targetMap x) return $ toNoFileKey GetKnownTargets -loadKnownTargets :: Recorder (WithPriority Log) -> SessionShake -> IO Bool -> TVar (Hashed KnownTargets) -> [ComponentInfo] -> [TargetDetails] -> IO () -loadKnownTargets recorder sessionShake getCheckProject knownTargetsVar new_deps targets = do - checkProject <- getCheckProject - - -- The VFS doesn't change on cradle edits, re-use the old one. - -- Invalidate all the existing GhcSession build nodes by restarting the Shake session - restartSession sessionShake VFSUnmodified "new component" [] $ do - keys2 <- invalidateCache sessionShake - keys1 <- extendKnownTargets recorder knownTargetsVar targets - unless (null new_deps || not checkProject) $ do - cfps' <- liftIO $ filterM (IO.doesFileExist . 
fromNormalizedFilePath) (concatMap targetLocations targets) - void $ enqueueActions sessionShake $ mkDelayedAction "InitialLoad" Debug $ void $ do - mmt <- uses GetModificationTime cfps' - let cs_exist = catMaybes (zipWith (<$) cfps' mmt) - modIfaces <- uses GetModIface cs_exist - -- update exports map - shakeExtras <- getShakeExtras - let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces - liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) - return [keys1, keys2] loadCradleWithNotifications :: Recorder (WithPriority Log) -> From b1237d0a244efafc083106f6229b05e711be7fb7 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Thu, 21 Aug 2025 17:14:03 +0800 Subject: [PATCH 66/87] debug --- .github/workflows/test.yml | 2 +- ghcide/src/Development/IDE/Core/Compile.hs | 3 +- ghcide/src/Development/IDE/Core/FileStore.hs | 10 +- ghcide/src/Development/IDE/Core/Shake.hs | 155 +++++++++--------- ghcide/src/Development/IDE/Plugin/Test.hs | 2 +- .../IDE/Graph/Internal/Database.hs | 20 ++- .../Development/IDE/Graph/Internal/Types.hs | 12 +- log copy.txt | 139 ++++++++++++++++ log.txt | 111 +++++++++++++ run_progress_test.sh | 22 +++ 10 files changed, 383 insertions(+), 93 deletions(-) create mode 100644 log copy.txt create mode 100644 log.txt create mode 100644 run_progress_test.sh diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 984758a310..1a9dde30fd 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -114,7 +114,7 @@ jobs: - if: needs.pre_job.outputs.should_skip_ghcide != 'true' && matrix.test name: Test ghcide # run the tests without parallelism to avoid running out of memory - run: cabal test ghcide-tests || cabal test ghcide-tests + run: cabal test ghcide-tests - if: matrix.test name: Test hls-plugin-api diff --git a/ghcide/src/Development/IDE/Core/Compile.hs b/ghcide/src/Development/IDE/Core/Compile.hs index 48439e2ff3..1e77a4c2f3 100644 --- a/ghcide/src/Development/IDE/Core/Compile.hs +++ b/ghcide/src/Development/IDE/Core/Compile.hs @@ -793,7 +793,8 @@ atomicFileWrite se targetPath write = do let dir = takeDirectory targetPath createDirectoryIfMissing True dir (tempFilePath, cleanUp) <- newTempFileWithin dir - (write tempFilePath >>= \x -> renameFile tempFilePath targetPath >> atomically (resetInterfaceStore se (toNormalizedFilePath' targetPath)) >> pure x) + (write tempFilePath >>= \x -> renameFile tempFilePath targetPath >> + atomically (resetInterfaceStore se (toNormalizedFilePath' targetPath)) >> pure x) `onException` cleanUp generateHieAsts :: HscEnv -> TcModuleResult diff --git a/ghcide/src/Development/IDE/Core/FileStore.hs b/ghcide/src/Development/IDE/Core/FileStore.hs index e545ec7b14..eb7b459d93 100644 --- a/ghcide/src/Development/IDE/Core/FileStore.hs +++ b/ghcide/src/Development/IDE/Core/FileStore.hs @@ -82,6 +82,7 @@ data Log = LogCouldNotIdentifyReverseDeps !NormalizedFilePath | LogTypeCheckingReverseDeps !NormalizedFilePath !(Maybe [NormalizedFilePath]) | LogShake Shake.Log + | LogGetModificationTime !NormalizedFilePath deriving Show instance Pretty Log where @@ -94,6 +95,8 @@ instance Pretty Log where <> ":" <+> pretty (fmap (fmap show) reverseDepPaths) LogShake msg -> pretty msg + LogGetModificationTime path -> + "Getting modification time for" <+> viaShow path addWatchedFileRule :: Recorder (WithPriority Log) -> (NormalizedFilePath -> Action Bool) -> Rules () addWatchedFileRule recorder isWatched = defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \AddWatchedFile f -> do @@ -109,7 
+112,8 @@ addWatchedFileRule recorder isWatched = defineNoDiagnostics (cmapWithPrio LogSha getModificationTimeRule :: Recorder (WithPriority Log) -> Rules () -getModificationTimeRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \(GetModificationTime_ missingFileDiags) file -> +getModificationTimeRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \(GetModificationTime_ missingFileDiags) file -> do + logWith recorder Info $ LogGetModificationTime file getModificationTimeImpl missingFileDiags file getModificationTimeImpl @@ -279,11 +283,9 @@ setFileModified recorder vfs state saved nfp actionBefore = do AlwaysCheck -> True CheckOnSave -> saved _ -> False - restartShakeSession (shakeExtras state) vfs (fromNormalizedFilePath nfp ++ " (modified)") [] $ do + restartShakeSession (shakeExtras state) vfs (fromNormalizedFilePath nfp ++ " (modified)") ([mkDelayedAction "ParentTC" L.Debug (typecheckParentsAction recorder nfp) | checkParents]) $ do keys<-actionBefore return (toKey GetModificationTime nfp:keys) - when checkParents $ - typecheckParents recorder state nfp typecheckParents :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> IO () typecheckParents recorder state nfp = void $ shakeEnqueue (shakeExtras state) parents diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs index 6fc9a4d00e..60bfd6f165 100644 --- a/ghcide/src/Development/IDE/Core/Shake.hs +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -130,6 +130,7 @@ import Development.IDE.Types.Options as Options import qualified Language.LSP.Protocol.Message as LSP import qualified Language.LSP.Server as LSP +import Control.Concurrent (threadDelay) import Development.IDE.Core.Tracing import Development.IDE.Core.WorkerThread import Development.IDE.GHC.Compat (NameCache, @@ -183,7 +184,7 @@ import UnliftIO (MonadUnliftIO (withRunI data Log = LogCreateHieDbExportsMapStart | LogCreateHieDbExportsMapFinish !Int - | LogBuildSessionRestart !String ![DelayedActionInternal] !KeySet !Seconds !(Maybe FilePath) + | LogBuildSessionRestart !String ![DelayedActionInternal] ![Key] !Seconds !(Maybe FilePath) | LogBuildSessionRestartTakingTooLong !Seconds | LogDelayedAction !(DelayedAction ()) !Seconds | LogBuildSessionFinish !(Maybe SomeException) @@ -208,7 +209,7 @@ instance Pretty Log where vcat [ "Restarting build session due to" <+> pretty reason , "Action Queue:" <+> pretty (map actionName actionQueue) - , "Keys:" <+> pretty (map show $ toListKeySet keyBackLog) + , "Keys:" <+> pretty (map show keyBackLog) , "Aborting previous build session took" <+> pretty (showDuration abortDuration) <+> pretty shakeProfilePath ] LogBuildSessionRestartTakingTooLong seconds -> "Build restart is taking too long (" <> pretty seconds <> " seconds)" @@ -279,7 +280,7 @@ data ShakeExtras = ShakeExtras ,globals :: TVar (HMap.HashMap TypeRep Dynamic) -- ^ Registry of global state used by rules. -- Small and immutable after startup, so not worth using an STM.Map. - ,state :: Values + ,stateValues :: Values ,diagnostics :: STMDiagnosticStore ,hiddenDiagnostics :: STMDiagnosticStore ,publishedDiagnostics :: STM.Map NormalizedUri [FileDiagnostic] @@ -328,8 +329,6 @@ data ShakeExtras = ShakeExtras -- We don't need a STM.Map because we never update individual keys ourselves. 
, defaultConfig :: Config -- ^ Default HLS config, only relevant if the client does not provide any Config - , dirtyKeys :: TVar KeySet - -- ^ Set of dirty rule keys since the last Shake run , restartQueue :: TQueue (IO ()) -- ^ Queue of restart actions to be run. , loaderQueue :: TQueue (IO ()) @@ -452,7 +451,7 @@ getIdeOptionsIO ide = do -- | Return the most recent, potentially stale, value and a PositionMapping -- for the version of that value. lastValueIO :: IdeRule k v => ShakeExtras -> k -> NormalizedFilePath -> IO (Maybe (v, PositionMapping)) -lastValueIO s@ShakeExtras{positionMapping,persistentKeys,state} k file = do +lastValueIO s@ShakeExtras{positionMapping,persistentKeys,stateValues} k file = do let readPersistent | IdeTesting testing <- ideTesting s -- Don't read stale persistent values in tests @@ -466,7 +465,7 @@ lastValueIO s@ShakeExtras{positionMapping,persistentKeys,state} k file = do MaybeT $ pure $ (,del,ver) <$> fromDynamic dv case mv of Nothing -> atomicallyNamed "lastValueIO 1" $ do - STM.focus (Focus.alter (alterValue $ Failed True)) (toKey k file) state + STM.focus (Focus.alter (alterValue $ Failed True)) (toKey k file) stateValues return Nothing Just (v,del,mbVer) -> do actual_version <- case mbVer of @@ -474,7 +473,7 @@ lastValueIO s@ShakeExtras{positionMapping,persistentKeys,state} k file = do Nothing -> (Just . ModificationTime <$> getModTime (fromNormalizedFilePath file)) `catch` (\(_ :: IOException) -> pure Nothing) atomicallyNamed "lastValueIO 2" $ do - STM.focus (Focus.alter (alterValue $ Stale (Just del) actual_version (toDyn v))) (toKey k file) state + STM.focus (Focus.alter (alterValue $ Stale (Just del) actual_version (toDyn v))) (toKey k file) stateValues Just . (v,) . addOldDelta del <$> mappingForVersion positionMapping file actual_version -- We got a new stale value from the persistent rule, insert it in the map without affecting diagnostics @@ -485,7 +484,7 @@ lastValueIO s@ShakeExtras{positionMapping,persistentKeys,state} k file = do -- Something already succeeded before, leave it alone _ -> old - atomicallyNamed "lastValueIO 4" (STM.lookup (toKey k file) state) >>= \case + atomicallyNamed "lastValueIO 4" (STM.lookup (toKey k file) stateValues) >>= \case Nothing -> readPersistent Just (ValueWithDiagnostics value _) -> case value of Succeeded ver (fromDynamic -> Just v) -> @@ -599,8 +598,8 @@ deleteValue -> k -> NormalizedFilePath -> STM [Key] -deleteValue ShakeExtras{state} key file = do - STM.delete (toKey key file) state +deleteValue ShakeExtras{stateValues} key file = do + STM.delete (toKey key file) stateValues return [toKey key file] @@ -668,7 +667,7 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer ideNc <- initNameCache 'r' knownKeyNames shakeExtras <- do globals <- newTVarIO HMap.empty - state <- STM.newIO + stateValues <- STM.newIO diagnostics <- STM.newIO hiddenDiagnostics <- STM.newIO publishedDiagnostics <- STM.newIO @@ -701,7 +700,6 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer actionQueue <- newQueue let clientCapabilities = maybe def LSP.resClientCapabilities lspEnv - dirtyKeys <- newTVarIO mempty -- Take one VFS snapshot at the start vfsVar <- newTVarIO =<< vfsSnapshot lspEnv pure ShakeExtras{shakeRecorder = recorder, ..} @@ -721,7 +719,7 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer -- monitoring let readValuesCounter = fromIntegral . countRelevantKeys checkParents <$> getStateKeys shakeExtras - readDirtyKeys = fromIntegral . countRelevantKeys checkParents . 
toListKeySet <$> readTVarIO(dirtyKeys shakeExtras) + readDirtyKeys = return 0 readIndexPending = fromIntegral . HMap.size <$> readTVarIO (indexPending $ hiedbWriter shakeExtras) readExportsMap = fromIntegral . ExportsMap.exportsMapSize <$> readTVarIO (exportsMap shakeExtras) readDatabaseCount = fromIntegral . countRelevantKeys checkParents . map fst <$> shakeGetDatabaseKeys shakeDb @@ -741,7 +739,7 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer getStateKeys :: ShakeExtras -> IO [Key] -getStateKeys = (fmap.fmap) fst . atomically . ListT.toList . STM.listT . state +getStateKeys = (fmap.fmap) fst . atomically . ListT.toList . STM.listT . stateValues -- | Must be called in the 'Initialized' handler and only once shakeSessionInit :: Recorder (WithPriority Log) -> IdeState -> IO () @@ -749,7 +747,7 @@ shakeSessionInit recorder IdeState{..} = do -- Take a snapshot of the VFS - it should be empty as we've received no notifications -- till now, but it can't hurt to be in sync with the `lsp` library. vfs <- vfsSnapshot (lspEnv shakeExtras) - initSession <- newSession recorder shakeExtras (VFSModified vfs) shakeDb [] "shakeSessionInit" + initSession <- newSession recorder shakeExtras (VFSModified vfs) shakeDb [] "shakeSessionInit" Nothing putMVar shakeSession initSession logWith recorder Debug LogSessionInitialised @@ -798,22 +796,22 @@ shakeRestart recorder IdeState{..} vfs reason acts ioActionBetweenShakeSession = shakeSession (\runner -> do (stopTime,()) <- duration $ logErrorAfter 10 $ cancelShakeSession runner - keys <- ioActionBetweenShakeSession + newDirtyKeys <- ioActionBetweenShakeSession -- it is every important to update the dirty keys after we enter the critical section -- see Note [Housekeeping rule cache and dirty key outside of hls-graph] - atomically $ modifyTVar' (dirtyKeys shakeExtras) $ \x -> foldl' (flip insertKeySet) x keys + -- atomically $ modifyTVar' (dirtyKeys shakeExtras) $ \x -> foldl' (flip insertKeySet) x keys res <- shakeDatabaseProfile shakeDb - backlog <- readTVarIO $ dirtyKeys shakeExtras queue <- atomicallyNamed "actionQueue - peek" $ peekInProgress $ actionQueue shakeExtras -- this log is required by tests - logWith recorder Debug $ LogBuildSessionRestart reason queue backlog stopTime res + logWith recorder Debug $ LogBuildSessionRestart reason queue newDirtyKeys stopTime res + return newDirtyKeys ) -- It is crucial to be masked here, otherwise we can get killed -- between spawning the new thread and updating shakeSession. -- See https://github.com/haskell/ghcide/issues/79 - (\() -> do - (,()) <$> newSession recorder shakeExtras vfs shakeDb acts reason) + (\newDirtyKeys -> do + (,()) <$> newSession recorder shakeExtras vfs shakeDb acts reason (Just newDirtyKeys)) where logErrorAfter :: Seconds -> IO () -> IO () logErrorAfter seconds action = flip withAsync (const action) $ do @@ -846,15 +844,15 @@ data VFSModified = VFSUnmodified | VFSModified !VFS -- | Set up a new 'ShakeSession' with a set of initial actions -- Will crash if there is an existing 'ShakeSession' running. 
-newSession - :: Recorder (WithPriority Log) - -> ShakeExtras - -> VFSModified - -> ShakeDatabase - -> [DelayedActionInternal] - -> String - -> IO ShakeSession -newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason = do +-- newSession +-- :: Recorder (WithPriority Log) +-- -> ShakeExtras +-- -> VFSModified +-- -> ShakeDatabase +-- -> [DelayedActionInternal] +-- -> String +-- -> IO ShakeSession +newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason newDirtyKeys = do -- Take a new VFS snapshot case vfsMod of @@ -863,16 +861,27 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason = do IdeOptions{optRunSubset} <- getIdeOptionsIO extras reenqueued <- atomicallyNamed "actionQueue - peek" $ peekInProgress actionQueue - allPendingKeys <- - if optRunSubset - then Just <$> readTVarIO dirtyKeys - else return Nothing let -- A daemon-like action used to inject additional work -- Runs actions from the work queue sequentially pumpActionThread otSpan = do d <- liftIO $ atomicallyNamed "action queue - pop" $ popQueue actionQueue actionFork (run otSpan d) $ \_ -> pumpActionThread otSpan + -- + -- garbageCollect = do + -- previousNumber <- countQueue actionQueue + -- liftIO $ threadDelay 2_000_000 + -- currentNumber <- countQueue actionQueue + -- if previousNumber + currentNumber == 0 + -- then do + -- logWith recorder Debug LogGarbageCollectingActionQueue + -- -- If the queue is empty, we can garbage collect it + -- -- This will remove all actions that are not running + -- atomicallyNamed "actionQueue - garbage collect" $ garbageCollectQueue actionQueue + -- else do + -- logWith recorder Debug LogGarbageCollectingActionQueueSkipped + + -- TODO figure out how to thread the otSpan into defineEarlyCutoff run _otSpan d = do @@ -887,10 +896,10 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason = do workRun restore = withSpan "Shake session" $ \otSpan -> do setTag otSpan "reason" (fromString reason) setTag otSpan "queue" (fromString $ unlines $ map actionName reenqueued) - whenJust allPendingKeys $ \kk -> setTag otSpan "keys" (BS8.pack $ unlines $ map show $ toListKeySet kk) + -- whenJust allPendingKeys $ \kk -> setTag otSpan "keys" (BS8.pack $ unlines $ map show $ toListKeySet kk) let keysActs = pumpActionThread otSpan : map (run otSpan) (reenqueued ++ acts) res <- try @SomeException $ - restore $ shakeRunDatabaseForKeys (toListKeySet <$> allPendingKeys) shakeDb keysActs + restore $ shakeRunDatabaseForKeys (if optRunSubset then newDirtyKeys else Nothing) shakeDb keysActs return $ do let exception = case res of @@ -953,35 +962,36 @@ garbageCollectDirtyKeys = do garbageCollectDirtyKeysOlderThan :: Int -> CheckParents -> Action [Key] garbageCollectDirtyKeysOlderThan maxAge checkParents = otTracedGarbageCollection "dirty GC" $ do - dirtySet <- getDirtySet - garbageCollectKeys "dirty GC" maxAge checkParents dirtySet - -garbageCollectKeys :: String -> Int -> CheckParents -> [(Key, Int)] -> Action [Key] -garbageCollectKeys label maxAge checkParents agedKeys = do - start <- liftIO offsetTime - ShakeExtras{state, dirtyKeys, lspEnv, shakeRecorder, ideTesting} <- getShakeExtras - (n::Int, garbage) <- liftIO $ - foldM (removeDirtyKey dirtyKeys state) (0,[]) agedKeys - t <- liftIO start - when (n>0) $ liftIO $ do - logWith shakeRecorder Debug $ LogShakeGarbageCollection (T.pack label) n t - when (coerce ideTesting) $ liftIO $ mRunLspT lspEnv $ - LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/GC")) - (toJSON $ mapMaybe (fmap showKey . 
fromKeyType) garbage) - return garbage - - where - showKey = show . Q - removeDirtyKey dk values st@(!counter, keys) (k, age) - | age > maxAge - , Just (kt,_) <- fromKeyType k - , not(kt `HSet.member` preservedKeys checkParents) - = atomicallyNamed "GC" $ do - gotIt <- STM.focus (Focus.member <* Focus.delete) k values - when gotIt $ - modifyTVar' dk (insertKeySet k) - return $ if gotIt then (counter+1, k:keys) else st - | otherwise = pure st + return [] + -- dirtySet <- getDirtySet + -- garbageCollectKeys "dirty GC" maxAge checkParents dirtySet + +-- garbageCollectKeys :: String -> Int -> CheckParents -> [(Key, Int)] -> Action [Key] +-- garbageCollectKeys label maxAge checkParents agedKeys = do +-- start <- liftIO offsetTime +-- ShakeExtras{state, lspEnv, shakeRecorder, ideTesting} <- getShakeExtras +-- (n::Int, garbage) <- liftIO $ +-- foldM (removeDirtyKey dirtyKeys state) (0,[]) agedKeys +-- t <- liftIO start +-- when (n>0) $ liftIO $ do +-- logWith shakeRecorder Debug $ LogShakeGarbageCollection (T.pack label) n t +-- when (coerce ideTesting) $ liftIO $ mRunLspT lspEnv $ +-- LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/GC")) +-- (toJSON $ mapMaybe (fmap showKey . fromKeyType) garbage) +-- return garbage + +-- where +-- showKey = show . Q +-- removeDirtyKey dk values st@(!counter, keys) (k, age) +-- | age > maxAge +-- , Just (kt,_) <- fromKeyType k +-- , not(kt `HSet.member` preservedKeys checkParents) +-- = atomicallyNamed "GC" $ do +-- gotIt <- STM.focus (Focus.member <* Focus.delete) k values +-- when gotIt $ +-- modifyTVar' dk (insertKeySet k) +-- return $ if gotIt then (counter+1, k:keys) else st +-- | otherwise = pure st countRelevantKeys :: CheckParents -> [Key] -> Int countRelevantKeys checkParents = @@ -1090,8 +1100,8 @@ useWithStaleFast' key file = do -- keep updating the value in the key. waitValue <- delayedAction $ mkDelayedAction ("C:" ++ show key ++ ":" ++ fromNormalizedFilePath file) Debug $ use key file - s@ShakeExtras{state} <- askShake - r <- liftIO $ atomicallyNamed "useStateFast" $ getValues state key file + s@ShakeExtras{stateValues} <- askShake + r <- liftIO $ atomicallyNamed "useStateFast" $ getValues stateValues key file liftIO $ case r of -- block for the result if we haven't computed before Nothing -> do @@ -1235,13 +1245,13 @@ defineEarlyCutoff' -> (Value v -> Action (Maybe BS.ByteString, IdeResult v)) -> Action (RunResult (A (RuleResult k))) defineEarlyCutoff' doDiagnostics cmp key file mbOld mode action = do - ShakeExtras{state, progress, dirtyKeys} <- getShakeExtras + ShakeExtras{stateValues, progress} <- getShakeExtras options <- getIdeOptions let trans g x = withRunInIO $ \run -> g (run x) (if optSkipProgress options key then id else trans (inProgress progress file)) $ do val <- case mbOld of Just old | mode == RunDependenciesSame -> do - mbValue <- liftIO $ atomicallyNamed "define - read 1" $ getValues state key file + mbValue <- liftIO $ atomicallyNamed "define - read 1" $ getValues stateValues key file case mbValue of -- No changes in the dependencies and we have -- an existing successful result. 
@@ -1257,7 +1267,7 @@ defineEarlyCutoff' doDiagnostics cmp key file mbOld mode action = do res <- case val of Just res -> return res Nothing -> do - staleV <- liftIO $ atomicallyNamed "define -read 3" $ getValues state key file <&> \case + staleV <- liftIO $ atomicallyNamed "define -read 3" $ getValues stateValues key file <&> \case Nothing -> Failed False Just (Succeeded ver v, _) -> Stale Nothing ver v Just (Stale d ver v, _) -> Stale d ver v @@ -1285,8 +1295,7 @@ defineEarlyCutoff' doDiagnostics cmp key file mbOld mode action = do (A res) $ do -- this hook needs to be run in the same transaction as the key is marked clean -- see Note [Housekeeping rule cache and dirty key outside of hls-graph] - setValues state key file res (Vector.fromList diags) - modifyTVar' dirtyKeys (deleteKeySet $ toKey key file) + setValues stateValues key file res (Vector.fromList diags) return res where -- Highly unsafe helper to compute the version of a file diff --git a/ghcide/src/Development/IDE/Plugin/Test.hs b/ghcide/src/Development/IDE/Plugin/Test.hs index e24bcfeee9..d99f61ddfd 100644 --- a/ghcide/src/Development/IDE/Plugin/Test.hs +++ b/ghcide/src/Development/IDE/Plugin/Test.hs @@ -130,7 +130,7 @@ testRequestHandler s (GarbageCollectDirtyKeys parents age) = do res <- liftIO $ runAction "garbage collect dirty" s $ garbageCollectDirtyKeysOlderThan age parents return $ Right $ toJSON $ map show res testRequestHandler s GetStoredKeys = do - keys <- liftIO $ atomically $ map fst <$> ListT.toList (STM.listT $ state $ shakeExtras s) + keys <- liftIO $ atomically $ map fst <$> ListT.toList (STM.listT $ stateValues $ shakeExtras s) return $ Right $ toJSON $ map show keys testRequestHandler s GetFilesOfInterest = do ff <- liftIO $ getFilesOfInterest s diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs index 359e5ceb6a..afe985bd88 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs @@ -39,6 +39,7 @@ import Development.IDE.Graph.Internal.Types import qualified Focus import qualified ListT import qualified StmContainers.Map as SMap +import qualified StmContainers.Set as SSet import System.IO.Unsafe import System.Time.Extra (duration, sleep) @@ -53,6 +54,7 @@ newDatabase :: Dynamic -> TheRules -> IO Database newDatabase databaseExtra databaseRules = do databaseStep <- newTVarIO $ Step 0 databaseValues <- atomically SMap.new + databaseDirtyKeys <- atomically SSet.new pure Database{..} -- | Increment the step and mark dirty. @@ -60,13 +62,12 @@ newDatabase databaseExtra databaseRules = do incDatabase :: Database -> Maybe [Key] -> IO () -- only some keys are dirty incDatabase db (Just kk) = do - atomicallyNamed "incDatabase" $ modifyTVar' (databaseStep db) $ \(Step i) -> Step $ i + 1 - transitiveDirtyKeys <- transitiveDirtySet db kk - for_ (toListKeySet transitiveDirtyKeys) $ \k -> - -- Updating all the keys atomically is not necessary - -- since we assume that no build is mutating the db. - -- Therefore run one transaction per key to minimise contention. 
- atomicallyNamed "incDatabase" $ SMap.focus updateDirty k (databaseValues db) + atomicallyNamed "incDatabase" $ do + modifyTVar' (databaseStep db) $ \(Step i) -> Step $ i + 1 + for_ kk $ \k -> SSet.insert k (databaseDirtyKeys db) + keys <- ListT.toList $ SSet.listT (databaseDirtyKeys db) + transitiveDirtyKeys <- transitiveDirtySet db keys + for_ (toListKeySet transitiveDirtyKeys) $ \k -> SMap.focus updateDirty k (databaseValues db) -- all keys are dirty incDatabase db Nothing = do @@ -220,6 +221,7 @@ compute db@Database{..} stack key mode result = do atomicallyNamed "compute and run hook" $ do runHook SMap.focus (updateStatus $ Clean res) key databaseValues + SSet.delete key databaseDirtyKeys pure res updateStatus :: Monad m => Status -> Focus.Focus KeyDetails m () @@ -286,14 +288,14 @@ updateReverseDeps myId db prev new = do getReverseDependencies :: Database -> Key -> STM (Maybe KeySet) getReverseDependencies db = (fmap.fmap) keyReverseDeps . flip SMap.lookup (databaseValues db) -transitiveDirtySet :: Foldable t => Database -> t Key -> IO KeySet +transitiveDirtySet :: Foldable t => Database -> t Key -> STM KeySet transitiveDirtySet database = flip State.execStateT mempty . traverse_ loop where loop x = do seen <- State.get if x `memberKeySet` seen then pure () else do State.put (insertKeySet x seen) - next <- lift $ atomically $ getReverseDependencies database x + next <- lift $ getReverseDependencies database x traverse_ loop (maybe mempty toListKeySet next) -------------------------------------------------------------------------------- diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs index 34bed42391..cdf384f7f6 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs @@ -27,6 +27,7 @@ import GHC.Generics (Generic) import qualified ListT import qualified StmContainers.Map as SMap import StmContainers.Map (Map) +import StmContainers.Set (Set) import System.Time.Extra (Seconds) import UnliftIO (MonadUnliftIO) @@ -109,10 +110,13 @@ onKeyReverseDeps f it@KeyDetails{..} = it{keyReverseDeps = f keyReverseDeps} data Database = Database { - databaseExtra :: Dynamic, - databaseRules :: TheRules, - databaseStep :: !(TVar Step), - databaseValues :: !(Map Key KeyDetails) + databaseExtra :: Dynamic, + databaseRules :: TheRules, + databaseStep :: !(TVar Step), + databaseValues :: !(Map Key KeyDetails), + databaseDirtyKeys :: !(Set Key) + -- ^ The set of dirty keys, which are the keys that have been marked as dirty + -- by the client, it would be removed once the target key is marked as clean. } waitForDatabaseRunningKeys :: Database -> IO () diff --git a/log copy.txt b/log copy.txt new file mode 100644 index 0000000000..5da3744ff0 --- /dev/null +++ b/log copy.txt @@ -0,0 +1,139 @@ +Run #3 +ThreadId 6 ghcide + diagnostics +| 2025-08-1 Cancellation + edit header + GetHieAst: 9T14:55:44.590216Z | Info | Test Project located in directory: /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736 +ThreadId 7 | 2025-08-19T14:55:44.591607Z | Info | Heap statistics are not enabled (RTS option -T is needed) +ThreadId 7 | 2025-08-19T14:55:44.594438Z | Info | Starting LSP server... + If you are seeing this in a terminal, you probably should have run WITHOUT the --lsp option! 
+ PluginIds: [ test + , block-command + , ghcide-completions + , core + , ghcide-type-lenses + , LSPRecorderCallback + , ghcide-hover-and-symbols + , ghcide-core ] +ThreadId 16 | 2025-08-19T14:55:44.594799Z | Info | Starting server +ThreadId 16 | 2025-08-19T14:55:44.595197Z | Debug | LSP: set new config: { + "cabalFormattingProvider": "cabal-gild", + "checkParents": "CheckOnSave", + "checkProject": true, + "formattingProvider": "ormolu", + "maxCompletions": 40, + "plugin": { }, + "sessionLoading": "singleComponent" +} +ThreadId 16 | 2025-08-19T14:55:44.595437Z | Info | Registering IDE configuration: IdeConfiguration {workspaceFolders = fromList [NormalizedUri (-2250868254854792059) "file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736"], clientSettings = hashed (Just (Object (fromList [("haskell",Object (fromList [("cabalFormattingProvider",String "cabal-gild"),("checkParents",String "CheckOnSave"),("checkProject",Bool True),("formattingProvider",String "ormolu"),("maxCompletions",Number 40.0),("plugin",Object (fromList [])),("sessionLoading",String "singleComponent")]))])))} +ThreadId 16 | 2025-08-19T14:55:44.603799Z | Info | Started LSP server in 0.01s +ThreadId 16 | 2025-08-19T14:55:44.603902Z | Debug | shouldRunSubset: True +ThreadId 24 | 2025-08-19T14:55:44.603975Z | Debug | Initializing exports map from hiedb +ThreadId 24 | 2025-08-19T14:55:44.604080Z | Debug | Done initializing exports map from hiedb. Size: 0 +ThreadId 20 | 2025-08-19T14:55:44.604735Z | Debug | Shake session initialized +ThreadId 16 | 2025-08-19T14:55:44.605403Z | Debug | VFS: opening file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 16 | 2025-08-19T14:55:44.605775Z | Debug | LSP: set new config: { + "cabalFormattingProvider": "cabal-gild", + "checkParents": "CheckOnSave", + "checkProject": true, + "formattingProvider": "ormolu", + "maxCompletions": 40, + "plugin": { }, + "sessionLoading": "singleComponent" +} +ThreadId 26 | 2025-08-19T14:55:44.605883Z | Debug | Finished build session +AsyncCancelled +ThreadId 21 | 2025-08-19T14:55:44.605934Z | Debug | Configuration changed: Config {checkParents = CheckOnSave, checkProject = True, formattingProvider = "ormolu", cabalFormattingProvider = "cabal-gild", maxCompletions = 40, sessionLoading = PreferSingleComponentLoading, plugins = fromList []} +ThreadId 21 | 2025-08-19T14:55:44.606008Z | Debug | Restarting build session due to config change +Action Queue: [] +Keys: [GetClientSettings; ] +Aborting previous build session took 0.00s +ThreadId 30 | 2025-08-19T14:55:44.606131Z | Debug | Finished build session +AsyncCancelled +ThreadId 21 | 2025-08-19T14:55:44.606163Z | Debug | Set files of interst to + [ ( /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs + , Modified {firstOpen = True} ) ] +ThreadId 21 | 2025-08-19T14:55:44.606229Z | Debug | Restarting build session due to /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs (modified) +Action Queue: [] +Keys: [ GetClientSettings; + , GetModificationTime; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs + , IsFileOfInterest; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs ] +Aborting previous build session took 0.00s +ThreadId 20 | 2025-08-19T14:55:44.606351Z | Debug | Opened text document: 
file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 39 | 2025-08-19T14:55:44.606579Z | Warning | Typechecking file start /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 42 | 2025-08-19T14:55:44.606750Z | Debug | Looking up session cache for /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 22 | 2025-08-19T14:55:44.606794Z | Debug | Loop: getOptions for /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 22 | 2025-08-19T14:55:44.606904Z | Info | Cradle path: Foo.hs +ThreadId 22 | 2025-08-19T14:55:44.606952Z | Warning | No [cradle](https://github.com/mpickering/hie-bios#hie-bios) found for Foo.hs. +Proceeding with [implicit cradle](https://hackage.haskell.org/package/implicit-hie). +You should ignore this message, unless you see a 'Multi Cradle: No prefixes matched' error. +ThreadId 46 | 2025-08-19T14:55:44.620269Z | Debug | Cradle: Cradle{ cradleRootDir = "/private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736", cradleOptsProg = CradleAction: Default} +ThreadId 46 | 2025-08-19T14:55:44.620334Z | Info | invoking build tool to determine build flags (this may take some time depending on the cache) +ThreadId 46 | 2025-08-19T14:55:44.683118Z | Debug | ghc --print-libdir +ThreadId 46 | 2025-08-19T14:55:44.746399Z | Debug | ghc --numeric-version +ThreadId 22 | 2025-08-19T14:55:44.746594Z | Debug | Session loading result: Right (ComponentOptions {componentOptions = [], componentRoot = "/private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736", componentDependencies = []},"/Users/ares/.ghcup/ghc/9.12.2/lib/ghc-9.12.2/lib","9.12.2") +ThreadId 22 | 2025-08-19T14:55:44.751250Z | Info | Interface files cache directory: /var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/.cache/ghcide/main-da39a3ee5e6b4b0d3255bfef95601890afd80709-da39a3ee5e6b4b0d3255bfef95601890afd80709 +ThreadId 22 | 2025-08-19T14:55:44.761208Z | Info | Making new HscEnv. 
In-place unit ids: [ main-da39a3ee5e6b4b0d3255bfef95601890afd80709 ] +ThreadId 22 | 2025-08-19T14:55:44.766821Z | Debug | New component cache HscEnvEq: (([],Just HscEnvEq 5),fromList []) +ThreadId 22 | 2025-08-19T14:55:44.767014Z | Info | New loaded files: [ /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs ] +ThreadId 42 | 2025-08-19T14:55:44.767161Z | Debug | Looking up session cache for /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 42 | 2025-08-19T14:55:44.767193Z | Info | Add dependency /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +[] +ThreadId 71 | 2025-08-19T14:55:44.767277Z | Info | Modification time for "v1" +ThreadId 71 | 2025-08-19T14:55:44.767314Z | Info | Modification time for "v1.1" +ThreadId 33 | 2025-08-19T14:55:44.767455Z | Debug | Finished build session +AsyncCancelled +ThreadId 21 | 2025-08-19T14:55:44.767514Z | Debug | Known files updated: + fromList [(TargetFile NormalizedFilePath "/private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs",fromList ["/private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs"])] +ThreadId 21 | 2025-08-19T14:55:44.769748Z | Debug | Restarting build session due to new component +Action Queue: [ WaitForIdeRule TypeCheck Uri {getUri = "file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs"} ] +Keys: [ IsFileOfInterest; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs + , GhcSessionIO; + , GetKnownTargets; ] +Aborting previous build session took 0.00s +ThreadId 83 | 2025-08-19T14:55:44.769932Z | Debug | Finished: InitialLoad Took: 0.00s +ThreadId 84 | 2025-08-19T14:55:44.769935Z | Warning | Typechecking file start /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 87 | 2025-08-19T14:55:44.770101Z | Debug | Looking up session cache for /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 87 | 2025-08-19T14:55:44.770141Z | Info | Add dependency /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +[] +ThreadId 84 | 2025-08-19T14:55:44.779362Z | Warning | Typechecking file mid /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 84 | 2025-08-19T14:55:44.787260Z | Warning | Typechecking file end /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 81 | 2025-08-19T14:55:44.788775Z | Debug | Finished: WaitForIdeRule TypeCheck Uri {getUri = "file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs"} Took: 0.02s +ThreadId 16 | 2025-08-19T14:55:44.990428Z | Error | LSP: no handler for: "non-existent-method" +ThreadId 78 | 2025-08-19T14:55:44.992303Z | Debug | Finished build session +AsyncCancelled +ThreadId 21 | 2025-08-19T14:55:44.992398Z | Debug | Set files of interst to + [ ( /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs + , Modified {firstOpen = False} ) ] +ThreadId 21 | 2025-08-19T14:55:44.992559Z | Debug | Restarting build session due to 
/private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs (modified) +Action Queue: [] +Keys: [ GetModificationTime; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs + , IsFileOfInterest; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs ] +Aborting previous build session took 0.00s +ThreadId 20 | 2025-08-19T14:55:44.992780Z | Debug | Modified text document: file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 132 | 2025-08-19T14:55:44.993293Z | Info | Modification time for "v1" +ThreadId 132 | 2025-08-19T14:55:44.993379Z | Info | Modification time for "v1.1" +ThreadId 128 | 2025-08-19T14:55:44.994761Z | Warning | Typechecking file start /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 126 | 2025-08-19T14:55:44.995047Z | Debug | Finished: WaitForIdeRule GetHieAst Uri {getUri = "file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs"} Took: 0.00s +ThreadId 121 | 2025-08-19T14:55:44.996016Z | Debug | Finished build session +AsyncCancelled +ThreadId 21 | 2025-08-19T14:55:44.996055Z | Debug | Restarting build session due to /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs (modified) +Action Queue: [] +Keys: [ GetModificationTime; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs ] +Aborting previous build session took 0.00s +ThreadId 20 | 2025-08-19T14:55:44.996292Z | Debug | Modified text document: file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 153 | 2025-08-19T14:55:45.005864Z | Info | Modification time for "v1" +ThreadId 153 | 2025-08-19T14:55:45.005981Z | Info | Modification time for "v1.1" +ThreadId 149 | 2025-08-19T14:55:45.007173Z | Warning | Typechecking file start /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 149 | 2025-08-19T14:55:45.007522Z | Warning | Typechecking file mid /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 149 | 2025-08-19T14:55:45.008236Z | Warning | Typechecking file end /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs +ThreadId 148 | 2025-08-19T14:55:45.008442Z | Debug | Finished: WaitForIdeRule TypeCheck Uri {getUri = "file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-26398544736/Foo.hs"} Took: 0.01s +ThreadId 16 | 2025-08-19T14:55:45.211497Z | Error | LSP: no handler for: "non-existent-method" +ThreadId 16 | 2025-08-19T14:55:45.717804Z | Info | LSP: received shutdown +ThreadId 16 | 2025-08-19T14:55:45.717897Z | Debug | Received shutdown message +ThreadId 143 | 2025-08-19T14:55:45.717964Z | Debug | Finished build session +AsyncCancelled +ThreadId 6 | 2025-08-19T14:55:45.718622Z | Debug | Cleaned up temporary directory + GetHieAst: OK (1.13s) + +All 1 tests passed (1.13s) diff --git a/log.txt b/log.txt new file mode 100644 index 0000000000..86afac3e96 --- /dev/null +++ b/log.txt @@ -0,0 +1,111 @@ +Run #4 +Thghcide + diagnostics + Cancellation + edit header +readId 6 | GetHieAst: 2025-08-19T14:55:45.773048Z | Info | Test Project located in directory: 
/private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736 +ThreadId 7 | 2025-08-19T14:55:45.774261Z | Info | Heap statistics are not enabled (RTS option -T is needed) +ThreadId 7 | 2025-08-19T14:55:45.776775Z | Info | Starting LSP server... + If you are seeing this in a terminal, you probably should have run WITHOUT the --lsp option! + PluginIds: [ test + , block-command + , ghcide-completions + , core + , ghcide-type-lenses + , LSPRecorderCallback + , ghcide-hover-and-symbols + , ghcide-core ] +ThreadId 16 | 2025-08-19T14:55:45.777036Z | Info | Starting server +ThreadId 16 | 2025-08-19T14:55:45.777814Z | Debug | LSP: set new config: { + "cabalFormattingProvider": "cabal-gild", + "checkParents": "CheckOnSave", + "checkProject": true, + "formattingProvider": "ormolu", + "maxCompletions": 40, + "plugin": { }, + "sessionLoading": "singleComponent" +} +ThreadId 16 | 2025-08-19T14:55:45.778159Z | Info | Registering IDE configuration: IdeConfiguration {workspaceFolders = fromList [NormalizedUri (-4077115142264691803) "file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736"], clientSettings = hashed (Just (Object (fromList [("haskell",Object (fromList [("cabalFormattingProvider",String "cabal-gild"),("checkParents",String "CheckOnSave"),("checkProject",Bool True),("formattingProvider",String "ormolu"),("maxCompletions",Number 40.0),("plugin",Object (fromList [])),("sessionLoading",String "singleComponent")]))])))} +ThreadId 16 | 2025-08-19T14:55:45.785776Z | Info | Started LSP server in 0.01s +ThreadId 16 | 2025-08-19T14:55:45.785884Z | Debug | shouldRunSubset: True +ThreadId 24 | 2025-08-19T14:55:45.785963Z | Debug | Initializing exports map from hiedb +ThreadId 24 | 2025-08-19T14:55:45.786047Z | Debug | Done initializing exports map from hiedb. 
Size: 0 +ThreadId 20 | 2025-08-19T14:55:45.786560Z | Debug | Shake session initialized +ThreadId 16 | 2025-08-19T14:55:45.786658Z | Debug | VFS: opening file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +ThreadId 16 | 2025-08-19T14:55:45.786871Z | Debug | LSP: set new config: { + "cabalFormattingProvider": "cabal-gild", + "checkParents": "CheckOnSave", + "checkProject": true, + "formattingProvider": "ormolu", + "maxCompletions": 40, + "plugin": { }, + "sessionLoading": "singleComponent" +} +ThreadId 26 | 2025-08-19T14:55:45.786890Z | Debug | Finished build session +AsyncCancelled +ThreadId 21 | 2025-08-19T14:55:45.787076Z | Debug | Set files of interst to + [ ( /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs + , Modified {firstOpen = True} ) ] +ThreadId 21 | 2025-08-19T14:55:45.787154Z | Debug | Restarting build session due to /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs (modified) +Action Queue: [] +Keys: [ GetModificationTime; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs + , IsFileOfInterest; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs ] +Aborting previous build session took 0.00s +ThreadId 20 | 2025-08-19T14:55:45.787225Z | Debug | Opened text document: file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +ThreadId 30 | 2025-08-19T14:55:45.787249Z | Debug | Finished build session +AsyncCancelled +ThreadId 21 | 2025-08-19T14:55:45.787316Z | Debug | Configuration changed: Config {checkParents = CheckOnSave, checkProject = True, formattingProvider = "ormolu", cabalFormattingProvider = "cabal-gild", maxCompletions = 40, sessionLoading = PreferSingleComponentLoading, plugins = fromList []} +ThreadId 21 | 2025-08-19T14:55:45.787402Z | Debug | Restarting build session due to config change +Action Queue: [] +Keys: [ GetModificationTime; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs + , IsFileOfInterest; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs + , GetClientSettings; ] +Aborting previous build session took 0.00s +ThreadId 39 | 2025-08-19T14:55:45.787576Z | Warning | Typechecking file start /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +ThreadId 42 | 2025-08-19T14:55:45.787771Z | Debug | Looking up session cache for /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +ThreadId 22 | 2025-08-19T14:55:45.787834Z | Debug | Loop: getOptions for /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +ThreadId 22 | 2025-08-19T14:55:45.787956Z | Info | Cradle path: Foo.hs +ThreadId 22 | 2025-08-19T14:55:45.788018Z | Warning | No [cradle](https://github.com/mpickering/hie-bios#hie-bios) found for Foo.hs. +Proceeding with [implicit cradle](https://hackage.haskell.org/package/implicit-hie). +You should ignore this message, unless you see a 'Multi Cradle: No prefixes matched' error. 
+ThreadId 46 | 2025-08-19T14:55:45.802993Z | Debug | Cradle: Cradle{ cradleRootDir = "/private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736", cradleOptsProg = CradleAction: Default} +ThreadId 46 | 2025-08-19T14:55:45.803066Z | Info | invoking build tool to determine build flags (this may take some time depending on the cache) +ThreadId 46 | 2025-08-19T14:55:45.868167Z | Debug | ghc --print-libdir +ThreadId 46 | 2025-08-19T14:55:45.932486Z | Debug | ghc --numeric-version +ThreadId 22 | 2025-08-19T14:55:45.932641Z | Debug | Session loading result: Right (ComponentOptions {componentOptions = [], componentRoot = "/private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736", componentDependencies = []},"/Users/ares/.ghcup/ghc/9.12.2/lib/ghc-9.12.2/lib","9.12.2") +ThreadId 22 | 2025-08-19T14:55:45.936702Z | Info | Interface files cache directory: /var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/.cache/ghcide/main-da39a3ee5e6b4b0d3255bfef95601890afd80709-da39a3ee5e6b4b0d3255bfef95601890afd80709 +ThreadId 22 | 2025-08-19T14:55:45.946351Z | Info | Making new HscEnv. In-place unit ids: [ main-da39a3ee5e6b4b0d3255bfef95601890afd80709 ] +ThreadId 22 | 2025-08-19T14:55:45.956408Z | Debug | New component cache HscEnvEq: (([],Just HscEnvEq 5),fromList []) +ThreadId 22 | 2025-08-19T14:55:45.956697Z | Info | New loaded files: [ /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs ] +ThreadId 42 | 2025-08-19T14:55:45.957872Z | Debug | Looking up session cache for /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +ThreadId 42 | 2025-08-19T14:55:45.957948Z | Info | Add dependency /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +[] +ThreadId 70 | 2025-08-19T14:55:45.959426Z | Info | Modification time for "v1" +ThreadId 70 | 2025-08-19T14:55:45.959473Z | Info | Modification time for "v1.1" +ThreadId 37 | 2025-08-19T14:55:45.959782Z | Debug | Finished build session +AsyncCancelled +ThreadId 21 | 2025-08-19T14:55:45.959915Z | Debug | Known files updated: + fromList [(TargetFile NormalizedFilePath "/private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs",fromList ["/private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs"])] +ThreadId 21 | 2025-08-19T14:55:45.959969Z | Debug | Restarting build session due to new component +Action Queue: [ WaitForIdeRule TypeCheck Uri {getUri = "file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs"} ] +Keys: [ IsFileOfInterest; /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs + , GhcSessionIO; + , GetKnownTargets; ] +Aborting previous build session took 0.00s +ThreadId 83 | 2025-08-19T14:55:45.960398Z | Debug | Finished: InitialLoad Took: 0.00s +ThreadId 84 | 2025-08-19T14:55:45.984810Z | Warning | Typechecking file start /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +ThreadId 88 | 2025-08-19T14:55:45.985135Z | Debug | Looking up session cache for /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +ThreadId 88 | 2025-08-19T14:55:45.985189Z | Info | Add dependency /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs 
+[] +ThreadId 84 | 2025-08-19T14:55:45.992785Z | Warning | Typechecking file mid /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +ThreadId 84 | 2025-08-19T14:55:46.004387Z | Warning | Typechecking file end /private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs +ThreadId 81 | 2025-08-19T14:55:46.004765Z | Debug | Finished: WaitForIdeRule TypeCheck Uri {getUri = "file:///private/var/folders/36/_743psv11gv2wrj9dclrpd500000gn/T/hls-test-root/extra-dir-35327754736/Foo.hs"} Took: 0.04s +ThreadId 16 | 2025-08-19T14:55:46.207056Z | Error | LSP: no handler for: "non-existent-method" +ThreadId 16 | 2025-08-19T14:55:46.207691Z | Error | LSP: no handler for: "non-existent-method" +ThreadId 16 | 2025-08-19T14:55:46.208630Z | Info | LSP: received shutdown +ThreadId 16 | 2025-08-19T14:55:46.208805Z | Debug | Received shutdown message +ThreadId 78 | 2025-08-19T14:55:46.209199Z | Debug | Finished build session +AsyncCancelled + GetHieAst: FAIL (0.44s) + ghcide-test/exe/DiagnosticTests.hs:560: + Could not find (DiagnosticSeverity_Warning,(3,0),"Top-level binding",Just "GHC-38417",Nothing) in [] diff --git a/run_progress_test.sh b/run_progress_test.sh new file mode 100644 index 0000000000..24101db454 --- /dev/null +++ b/run_progress_test.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +set -e +# pattern="edit header" + +# test_target="func-test" +# pattern="sends indefinite progress notifications" +test_target="ghcide-tests" +pattern="lower-case drive" +# HLS_TEST_LOG_STDERR=1 +NumberOfRuns=1 + # TASTY_PATTERN="sends indefinite progress notifications" cabal test func-test + # TASTY_PATTERN="notification handlers run in priority order" cabal test ghcide-tests + + +cabal build $test_target +targetBin=$(find dist-newstyle -type f -name $test_target) +for i in {1..$NumberOfRuns}; do + echo "Run #$i" + # TASTY_PATTERN=$pattern HLS_TEST_LOG_STDERR=$HLS_TEST_LOG_STDERR HLS_TEST_HARNESS_STDERR=1 $targetBin + TASTY_PATTERN=$pattern HLS_TEST_HARNESS_STDERR=1 $targetBin +done From 92e70e833959981d140a8723a7bd1a72f17ed499 Mon Sep 17 00:00:00 2001 From: patrick Date: Sun, 24 Aug 2025 16:19:59 +0800 Subject: [PATCH 67/87] remove cradle for test (#4706) --- ghcide-test/exe/DiagnosticTests.hs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ghcide-test/exe/DiagnosticTests.hs b/ghcide-test/exe/DiagnosticTests.hs index 52aba0b9b7..df05fdd61d 100644 --- a/ghcide-test/exe/DiagnosticTests.hs +++ b/ghcide-test/exe/DiagnosticTests.hs @@ -244,9 +244,7 @@ tests = testGroup "diagnostics" _ <- createDoc "ModuleB.hs" "haskell" contentB _ <- createDoc "ModuleB.hs-boot" "haskell" contentBboot expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding", Just "GHC-38417")])] - , testWithDummyPlugin "bidirectional module dependency with hs-boot" - (mkIdeTestFs [directCradle ["ModuleA", "ModuleB"]]) - $ do + , testWithDummyPluginEmpty "bidirectional module dependency with hs-boot" $ do let contentA = T.unlines [ "module ModuleA where" , "import {-# SOURCE #-} ModuleB" From be30f3eb5bba59c50c084a324d432bfc2df24837 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Dybiec?= Date: Mon, 25 Aug 2025 14:43:04 +0100 Subject: [PATCH 68/87] Use structured diagnostics in pragmas plugin (#4620) Changes suggestion provider in pragmas plugin to use structured diagnostics and ghc machinery to generate hints --- .../src/Development/IDE/GHC/Compat/Error.hs | 3 + .../src/Ide/Plugin/Pragmas.hs | 74 +++++++++++++++---- 2 
files changed, 64 insertions(+), 13 deletions(-) diff --git a/ghcide/src/Development/IDE/GHC/Compat/Error.hs b/ghcide/src/Development/IDE/GHC/Compat/Error.hs index de59afa146..63ec75bfc9 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Error.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Error.hs @@ -18,6 +18,9 @@ module Development.IDE.GHC.Compat.Error ( DriverMessage (..), -- * General Diagnostics Diagnostic(..), + -- * GHC Hints + GhcHint (SuggestExtension), + LanguageExtensionHint (..), -- * Prisms and lenses for error selection _TcRnMessage, _TcRnMessageWithCtx, diff --git a/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs b/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs index 23bfd727cf..c395feba9e 100644 --- a/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs +++ b/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs @@ -1,6 +1,7 @@ {-# LANGUAGE CPP #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiWayIf #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} @@ -27,8 +28,13 @@ import qualified Data.Text as T import Development.IDE hiding (line) import Development.IDE.Core.Compile (sourceParser, sourceTypecheck) +import Development.IDE.Core.FileStore (getVersionedTextDoc) import Development.IDE.Core.PluginUtils import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.Error (GhcHint (SuggestExtension), + LanguageExtensionHint (..), + diagnosticHints, + msgEnvelopeErrorL) import Development.IDE.Plugin.Completions (ghcideCompletionsPluginPriority) import Development.IDE.Plugin.Completions.Logic (getCompletionPrefixFromRope) import Development.IDE.Plugin.Completions.Types (PosPrefixInfo (..)) @@ -69,13 +75,33 @@ data Pragma = LangExt T.Text | OptGHC T.Text deriving (Show, Eq, Ord) suggestPragmaProvider :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction -suggestPragmaProvider = mkCodeActionProvider suggest +suggestPragmaProvider = if ghcVersion /= GHC96 then + mkCodeActionProvider suggestAddPragma + else mkCodeActionProvider96 suggestAddPragma96 suggestDisableWarningProvider :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction suggestDisableWarningProvider = mkCodeActionProvider $ const suggestDisableWarning -mkCodeActionProvider :: (Maybe DynFlags -> Diagnostic -> [PragmaEdit]) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +mkCodeActionProvider :: (Maybe DynFlags -> FileDiagnostic -> [PragmaEdit]) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction mkCodeActionProvider mkSuggest state _plId + (LSP.CodeActionParams _ _ docId@LSP.TextDocumentIdentifier{ _uri = uri } caRange _) = do + verTxtDocId <- liftIO $ runAction "classplugin.codeAction.getVersionedTextDoc" state $ getVersionedTextDoc docId + normalizedFilePath <- getNormalizedFilePathE (verTxtDocId ^. L.uri) + -- ghc session to get some dynflags even if module isn't parsed + (hscEnv -> hsc_dflags -> sessionDynFlags, _) <- + runActionE "Pragmas.GhcSession" state $ useWithStaleE GhcSession normalizedFilePath + fileContents <- liftIO $ runAction "Pragmas.GetFileContents" state $ getFileContents normalizedFilePath + parsedModule <- liftIO $ runAction "Pragmas.GetParsedModule" state $ getParsedModule normalizedFilePath + let parsedModuleDynFlags = ms_hspp_opts . 
pm_mod_summary <$> parsedModule + nextPragmaInfo = Pragmas.getNextPragmaInfo sessionDynFlags fileContents + activeDiagnosticsInRange (shakeExtras state) normalizedFilePath caRange >>= \case + Nothing -> pure $ LSP.InL [] + Just fileDiags -> do + let actions = concatMap (mkSuggest parsedModuleDynFlags) fileDiags + pure $ LSP.InL $ pragmaEditToAction uri nextPragmaInfo <$> nubOrdOn snd actions + +mkCodeActionProvider96 :: (Maybe DynFlags -> Diagnostic -> [PragmaEdit]) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +mkCodeActionProvider96 mkSuggest state _plId (LSP.CodeActionParams _ _ LSP.TextDocumentIdentifier{ _uri = uri } _ (LSP.CodeActionContext diags _monly _)) = do normalizedFilePath <- getNormalizedFilePathE uri -- ghc session to get some dynflags even if module isn't parsed @@ -89,7 +115,6 @@ mkCodeActionProvider mkSuggest state _plId pure $ LSP.InL $ pragmaEditToAction uri nextPragmaInfo <$> pedits - -- | Add a Pragma to the given URI at the top of the file. -- It is assumed that the pragma name is a valid pragma, -- thus, not validated. @@ -108,22 +133,17 @@ pragmaEditToAction uri Pragmas.NextPragmaInfo{ nextPragmaLine, lineSplitTextEdit , let LSP.TextEdit{ _range, _newText } = insertTextEdit -> [LSP.TextEdit _range (render p <> _newText), deleteTextEdit] | otherwise -> [LSP.TextEdit pragmaInsertRange (render p)] - edit = LSP.WorkspaceEdit (Just $ M.singleton uri textEdits) Nothing Nothing -suggest :: Maybe DynFlags -> Diagnostic -> [PragmaEdit] -suggest dflags diag = - suggestAddPragma dflags diag - -- --------------------------------------------------------------------- -suggestDisableWarning :: Diagnostic -> [PragmaEdit] +suggestDisableWarning :: FileDiagnostic -> [PragmaEdit] suggestDisableWarning diagnostic - | Just (Just (JSON.Array attachedReasons)) <- diagnostic ^? attachedReason + | Just (Just (JSON.Array attachedReasons)) <- diagnostic ^? fdLspDiagnosticL . attachedReason = [ ("Disable \"" <> w <> "\" warnings", OptGHC w) | JSON.String attachedReason <- Foldable.toList attachedReasons @@ -142,10 +162,24 @@ warningBlacklist = -- --------------------------------------------------------------------- +-- | Offer to add a missing Language Pragma to the top of a file. +suggestAddPragma :: Maybe DynFlags -> FileDiagnostic -> [PragmaEdit] +suggestAddPragma mDynflags fd= [("Add \"" <> r <> "\"", LangExt r) | r <- map (T.pack . show) $ suggestsExtension fd, r `notElem` disabled] + where + disabled + | Just dynFlags <- mDynflags = + -- GHC does not export 'OnOff', so we have to view it as string + mapMaybe (T.stripPrefix "Off " . printOutputable) (extensions dynFlags) + | otherwise = + -- When the module failed to parse, we don't have access to its + -- dynFlags. In that case, simply don't disable any pragmas. + [] + -- | Offer to add a missing Language Pragma to the top of a file. -- Pragmas are defined by a curated list of known pragmas, see 'possiblePragmas'. -suggestAddPragma :: Maybe DynFlags -> Diagnostic -> [PragmaEdit] -suggestAddPragma mDynflags Diagnostic {_message, _source} +-- Kept for compatibility with ghc9.6 - it is missing some structured diagnostics +suggestAddPragma96 :: Maybe DynFlags -> Diagnostic -> [PragmaEdit] +suggestAddPragma96 mDynflags Diagnostic {_message, _source} | _source == Just sourceTypecheck || _source == Just sourceParser = genPragma _message where genPragma target = @@ -158,7 +192,7 @@ suggestAddPragma mDynflags Diagnostic {_message, _source} -- When the module failed to parse, we don't have access to its -- dynFlags. 
In that case, simply don't disable any pragmas. [] -suggestAddPragma _ _ = [] +suggestAddPragma96 _ _ = [] -- | Find all Pragmas are an infix of the search term. findPragma :: T.Text -> [T.Text] @@ -178,6 +212,20 @@ findPragma str = concatMap check possiblePragmas , "Strict" /= name ] +suggestsExtension :: FileDiagnostic -> [Extension] +suggestsExtension message = case message ^? fdStructuredMessageL . _SomeStructuredMessage . msgEnvelopeErrorL of + Just s -> concat $ map (\case + SuggestExtension s -> ghcHintSuggestsExtension s + _ -> []) (diagnosticHints s) + _ -> [] + +ghcHintSuggestsExtension :: LanguageExtensionHint -> [Extension] +ghcHintSuggestsExtension (SuggestSingleExtension _ ext) = [ext] +ghcHintSuggestsExtension (SuggestAnyExtension _ (ext:_)) = [ext] -- ghc suggests any of those, we pick first +ghcHintSuggestsExtension (SuggestAnyExtension _ []) = [] +ghcHintSuggestsExtension (SuggestExtensions _ ext) = ext +ghcHintSuggestsExtension (SuggestExtensionInOrderTo _ ext) = [ext] + -- | All language pragmas, including the No- variants allPragmas :: [T.Text] allPragmas = From 5550ca5a0a32153791b6556246febbdd19e01949 Mon Sep 17 00:00:00 2001 From: patrick Date: Wed, 27 Aug 2025 19:15:20 +0800 Subject: [PATCH 69/87] Replace writeFile with atomicFileWrite in ghcide-tests (#4710) * Replace writeFile and writeFileUTF8 with atomicFileWriteString and atomicFileWriteStringUTF8 for safer file operations --- ghcide-test/exe/CradleTests.hs | 11 ++++----- ghcide-test/exe/DependentFileTest.hs | 5 +++-- ghcide-test/exe/DiagnosticTests.hs | 4 ++-- ghcide-test/exe/GarbageCollectionTests.hs | 11 ++++----- ghcide-test/exe/IfaceTests.hs | 3 ++- ghcide-test/exe/PluginSimpleTests.hs | 3 ++- ghcide-test/exe/UnitTests.hs | 5 +++-- ghcide-test/exe/WatchedFileTests.hs | 12 +++++----- hls-test-utils/src/Test/Hls/FileSystem.hs | 27 +++++++++++++++++++++++ 9 files changed, 57 insertions(+), 24 deletions(-) diff --git a/ghcide-test/exe/CradleTests.hs b/ghcide-test/exe/CradleTests.hs index d79b90c835..8edb258257 100644 --- a/ghcide-test/exe/CradleTests.hs +++ b/ghcide-test/exe/CradleTests.hs @@ -29,6 +29,7 @@ import Language.LSP.Protocol.Types hiding import Language.LSP.Test import System.FilePath import System.IO.Extra hiding (withTempDir) +import Test.Hls.FileSystem import Test.Hls.Util (EnvSpec (..), OS (..), ignoreInEnv) import Test.Tasty @@ -53,7 +54,7 @@ loadCradleOnlyonce = testGroup "load cradle only once" ] where direct dir = do - liftIO $ writeFileUTF8 (dir "hie.yaml") + liftIO $ atomicFileWriteStringUTF8 (dir "hie.yaml") "cradle: {direct: {arguments: []}}" test dir implicit dir = test dir @@ -73,7 +74,7 @@ retryFailedCradle = testWithDummyPluginEmpty' "retry failed" $ \dir -> do -- The false cradle always fails let hieContents = "cradle: {bios: {shell: \"false\"}}" hiePath = dir "hie.yaml" - liftIO $ writeFile hiePath hieContents + liftIO $ atomicFileWriteString hiePath hieContents let aPath = dir "A.hs" doc <- createDoc aPath "haskell" "main = return ()" WaitForIdeRuleResult {..} <- waitForAction "TypeCheck" doc @@ -81,7 +82,7 @@ retryFailedCradle = testWithDummyPluginEmpty' "retry failed" $ \dir -> do -- Fix the cradle and typecheck again let validCradle = "cradle: {bios: {shell: \"echo A.hs\"}}" - liftIO $ writeFileUTF8 hiePath $ T.unpack validCradle + liftIO $ atomicFileWriteStringUTF8 hiePath $ T.unpack validCradle sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams [FileEvent (filePathToUri $ dir "hie.yaml") FileChangeType_Changed ] @@ -214,7 +215,7 @@ 
sessionDepsArePickedUp = testWithDummyPluginEmpty' "session-deps-are-picked-up" $ \dir -> do liftIO $ - writeFileUTF8 + atomicFileWriteStringUTF8 (dir "hie.yaml") "cradle: {direct: {arguments: []}}" -- Open without OverloadedStrings and expect an error. @@ -223,7 +224,7 @@ sessionDepsArePickedUp = testWithDummyPluginEmpty' -- Update hie.yaml to enable OverloadedStrings. liftIO $ - writeFileUTF8 + atomicFileWriteStringUTF8 (dir "hie.yaml") "cradle: {direct: {arguments: [-XOverloadedStrings]}}" sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams diff --git a/ghcide-test/exe/DependentFileTest.hs b/ghcide-test/exe/DependentFileTest.hs index 1f243819e3..dd2cb2a046 100644 --- a/ghcide-test/exe/DependentFileTest.hs +++ b/ghcide-test/exe/DependentFileTest.hs @@ -15,6 +15,7 @@ import Language.LSP.Protocol.Types hiding mkRange) import Language.LSP.Test import Test.Hls +import Test.Hls.FileSystem tests :: TestTree @@ -31,7 +32,7 @@ tests = testGroup "addDependentFile" -- If the file contains B then no type error -- otherwise type error let depFilePath = "dep-file.txt" - liftIO $ writeFile depFilePath "A" + liftIO $ atomicFileWriteString depFilePath "A" let fooContent = T.unlines [ "{-# LANGUAGE TemplateHaskell #-}" , "module Foo where" @@ -48,7 +49,7 @@ tests = testGroup "addDependentFile" expectDiagnostics [("Foo.hs", [(DiagnosticSeverity_Error, (4,11), "Couldn't match type", Just "GHC-83865")])] -- Now modify the dependent file - liftIO $ writeFile depFilePath "B" + liftIO $ atomicFileWriteString depFilePath "B" sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams [FileEvent (filePathToUri depFilePath) FileChangeType_Changed ] diff --git a/ghcide-test/exe/DiagnosticTests.hs b/ghcide-test/exe/DiagnosticTests.hs index df05fdd61d..a0e9ae2768 100644 --- a/ghcide-test/exe/DiagnosticTests.hs +++ b/ghcide-test/exe/DiagnosticTests.hs @@ -39,7 +39,7 @@ import System.Time.Extra import Test.Hls (TestConfig (testConfigCaps, testDirLocation, testDisableKick, testPluginDescriptor), runSessionWithTestConfig, waitForProgressBegin) -import Test.Hls.FileSystem (directCradle, file, text) +import Test.Hls.FileSystem import Test.Tasty import Test.Tasty.HUnit @@ -381,7 +381,7 @@ tests = testGroup "diagnostics" let (drive, suffix) = splitDrive pathB in filePathToUri (joinDrive (lower drive) suffix) liftIO $ createDirectoryIfMissing True (takeDirectory pathB) - liftIO $ writeFileUTF8 pathB $ T.unpack bContent + liftIO $ atomicFileWriteStringUTF8 pathB $ T.unpack bContent uriA <- getDocUri "A/A.hs" Just pathA <- pure $ uriToFilePath uriA uriA <- pure $ diff --git a/ghcide-test/exe/GarbageCollectionTests.hs b/ghcide-test/exe/GarbageCollectionTests.hs index 5cc9935352..1a867ad747 100644 --- a/ghcide-test/exe/GarbageCollectionTests.hs +++ b/ghcide-test/exe/GarbageCollectionTests.hs @@ -12,6 +12,7 @@ import Language.LSP.Protocol.Types hiding (SemanticTokenAbsolute (..), SemanticTokensEdit (..), mkRange) import Language.LSP.Test import System.FilePath +import Test.Hls.FileSystem import Test.Tasty import Test.Tasty.HUnit import Text.Printf (printf) @@ -20,14 +21,14 @@ tests :: TestTree tests = testGroup "garbage collection" [ testGroup "dirty keys" [ testWithDummyPluginEmpty' "are collected" $ \dir -> do - liftIO $ writeFile (dir "hie.yaml") "cradle: {direct: {arguments: [A]}}" + liftIO $ atomicFileWriteString (dir "hie.yaml") "cradle: {direct: {arguments: [A]}}" doc <- generateGarbage "A" dir closeDoc doc garbage <- waitForGC liftIO $ assertBool "no garbage was 
found" $ not $ null garbage , testWithDummyPluginEmpty' "are deleted from the state" $ \dir -> do - liftIO $ writeFile (dir "hie.yaml") "cradle: {direct: {arguments: [A]}}" + liftIO $ atomicFileWriteString (dir "hie.yaml") "cradle: {direct: {arguments: [A]}}" docA <- generateGarbage "A" dir keys0 <- getStoredKeys closeDoc docA @@ -37,7 +38,7 @@ tests = testGroup "garbage collection" liftIO $ assertBool "keys were not deleted from the state" (length keys1 < length keys0) , testWithDummyPluginEmpty' "are not regenerated unless needed" $ \dir -> do - liftIO $ writeFile (dir "hie.yaml") "cradle: {direct: {arguments: [A.hs, B.hs]}}" + liftIO $ atomicFileWriteString (dir "hie.yaml") "cradle: {direct: {arguments: [A.hs, B.hs]}}" docA <- generateGarbage "A" dir _docB <- generateGarbage "B" dir @@ -58,7 +59,7 @@ tests = testGroup "garbage collection" liftIO $ regeneratedKeys @?= mempty , testWithDummyPluginEmpty' "regenerate successfully" $ \dir -> do - liftIO $ writeFile (dir "hie.yaml") "cradle: {direct: {arguments: [A]}}" + liftIO $ atomicFileWriteString (dir "hie.yaml") "cradle: {direct: {arguments: [A]}}" docA <- generateGarbage "A" dir closeDoc docA garbage <- waitForGC @@ -83,7 +84,7 @@ tests = testGroup "garbage collection" let fp = modName <> ".hs" body = printf "module %s where" modName doc <- createDoc fp "haskell" (T.pack body) - liftIO $ writeFile (dir fp) body + liftIO $ atomicFileWriteString (dir fp) body builds <- waitForTypecheck doc liftIO $ assertBool "something is wrong with this test" builds return doc diff --git a/ghcide-test/exe/IfaceTests.hs b/ghcide-test/exe/IfaceTests.hs index d7dc533550..e1e94c926d 100644 --- a/ghcide-test/exe/IfaceTests.hs +++ b/ghcide-test/exe/IfaceTests.hs @@ -18,6 +18,7 @@ import Language.LSP.Test import System.Directory import System.FilePath import System.IO.Extra hiding (withTempDir) +import Test.Hls.FileSystem import Test.Tasty import Test.Tasty.HUnit @@ -45,7 +46,7 @@ ifaceTHTest = testWithExtraFiles "iface-th-test" "TH" $ \dir -> do cdoc <- createDoc cPath "haskell" cSource -- Change [TH]a from () to Bool - liftIO $ writeFileUTF8 aPath (unlines $ init (lines $ T.unpack aSource) ++ ["th_a = [d| a = False|]"]) + liftIO $ atomicFileWriteStringUTF8 aPath (unlines $ init (lines $ T.unpack aSource) ++ ["th_a = [d| a = False|]"]) -- Check that the change propagates to C changeDoc cdoc [TextDocumentContentChangeEvent . InR $ TextDocumentContentChangeWholeDocument cSource] diff --git a/ghcide-test/exe/PluginSimpleTests.hs b/ghcide-test/exe/PluginSimpleTests.hs index c160d2461c..b15e9af749 100644 --- a/ghcide-test/exe/PluginSimpleTests.hs +++ b/ghcide-test/exe/PluginSimpleTests.hs @@ -9,6 +9,7 @@ import Language.LSP.Protocol.Types hiding (SemanticTokenAbsolute (..), SemanticTokensEdit (..), mkRange) import Language.LSP.Test import System.FilePath +import Test.Hls.FileSystem import Test.Tasty tests :: TestTree @@ -36,7 +37,7 @@ tests = -- required by plugin-1.0.0). See the build log above for details. 
testWithExtraFiles "simple plugin" "plugin-knownnat" $ \dir -> do _ <- openDoc (dir "KnownNat.hs") "haskell" - liftIO $ writeFile (dir"hie.yaml") + liftIO $ atomicFileWriteString (dir"hie.yaml") "cradle: {cabal: [{path: '.', component: 'lib:plugin'}]}" expectDiagnostics diff --git a/ghcide-test/exe/UnitTests.hs b/ghcide-test/exe/UnitTests.hs index b2940ab27f..dcd5c170f4 100644 --- a/ghcide-test/exe/UnitTests.hs +++ b/ghcide-test/exe/UnitTests.hs @@ -31,6 +31,7 @@ import System.Mem (performGC) import Test.Hls (IdeState, def, runSessionWithServerInTmpDir, waitForProgressDone) +import Test.Hls.FileSystem import Test.Tasty import Test.Tasty.ExpectedFailure import Test.Tasty.HUnit @@ -104,9 +105,9 @@ findResolution_us :: Int -> IO Int findResolution_us delay_us | delay_us >= 1000000 = error "Unable to compute timestamp resolution" findResolution_us delay_us = withTempFile $ \f -> withTempFile $ \f' -> do performGC - writeFile f "" + atomicFileWriteString f "" threadDelay delay_us - writeFile f' "" + atomicFileWriteString f' "" t <- getModTime f t' <- getModTime f' if t /= t' then return delay_us else findResolution_us (delay_us * 10) diff --git a/ghcide-test/exe/WatchedFileTests.hs b/ghcide-test/exe/WatchedFileTests.hs index 1c2ded9109..f00e4bfffe 100644 --- a/ghcide-test/exe/WatchedFileTests.hs +++ b/ghcide-test/exe/WatchedFileTests.hs @@ -29,7 +29,7 @@ tests :: TestTree tests = testGroup "watched files" [ testGroup "Subscriptions" [ testWithDummyPluginEmpty' "workspace files" $ \sessionDir -> do - liftIO $ writeFile (sessionDir "hie.yaml") "cradle: {direct: {arguments: [\"-isrc\", \"A\", \"WatchedFilesMissingModule\"]}}" + liftIO $ atomicFileWriteString (sessionDir "hie.yaml") "cradle: {direct: {arguments: [\"-isrc\", \"A\", \"WatchedFilesMissingModule\"]}}" _doc <- createDoc "A.hs" "haskell" "{-#LANGUAGE NoImplicitPrelude #-}\nmodule A where\nimport WatchedFilesMissingModule" setIgnoringRegistrationRequests False watchedFileRegs <- getWatchedFilesSubscriptionsUntil SMethod_TextDocumentPublishDiagnostics @@ -40,7 +40,7 @@ tests = testGroup "watched files" , testWithDummyPluginEmpty' "non workspace file" $ \sessionDir -> do tmpDir <- liftIO getTemporaryDirectory let yaml = "cradle: {direct: {arguments: [\"-i" <> tail(init(show tmpDir)) <> "\", \"A\", \"WatchedFilesMissingModule\"]}}" - liftIO $ writeFile (sessionDir "hie.yaml") yaml + liftIO $ atomicFileWriteString (sessionDir "hie.yaml") yaml _doc <- createDoc "A.hs" "haskell" "{-# LANGUAGE NoImplicitPrelude#-}\nmodule A where\nimport WatchedFilesMissingModule" setIgnoringRegistrationRequests False watchedFileRegs <- getWatchedFilesSubscriptionsUntil SMethod_TextDocumentPublishDiagnostics @@ -53,8 +53,8 @@ tests = testGroup "watched files" , testGroup "Changes" [ testWithDummyPluginEmpty' "workspace files" $ \sessionDir -> do - liftIO $ writeFile (sessionDir "hie.yaml") "cradle: {direct: {arguments: [\"-isrc\", \"A\", \"B\"]}}" - liftIO $ writeFile (sessionDir "B.hs") $ unlines + liftIO $ atomicFileWriteString (sessionDir "hie.yaml") "cradle: {direct: {arguments: [\"-isrc\", \"A\", \"B\"]}}" + liftIO $ atomicFileWriteString (sessionDir "B.hs") $ unlines ["module B where" ,"b :: Bool" ,"b = False"] @@ -66,7 +66,7 @@ tests = testGroup "watched files" ] expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 4), "Couldn't match expected type '()' with actual type 'Bool'", Just "GHC-83865")])] -- modify B off editor - liftIO $ writeFile (sessionDir "B.hs") $ unlines + liftIO $ atomicFileWriteString (sessionDir "B.hs") $ unlines ["module B 
where" ,"b :: Int" ,"b = 0"] @@ -80,7 +80,7 @@ tests = testGroup "watched files" let cabalFile = "reload.cabal" cabalContent <- liftIO $ T.readFile cabalFile let fix = T.replace "build-depends: base" "build-depends: base, split" - liftIO $ T.writeFile cabalFile (fix cabalContent) + liftIO $ atomicFileWriteText cabalFile (fix cabalContent) sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams [ FileEvent (filePathToUri $ sessionDir cabalFile) FileChangeType_Changed ] expectDiagnostics [(hsFile, [])] diff --git a/hls-test-utils/src/Test/Hls/FileSystem.hs b/hls-test-utils/src/Test/Hls/FileSystem.hs index c93643badd..e349dbad3b 100644 --- a/hls-test-utils/src/Test/Hls/FileSystem.hs +++ b/hls-test-utils/src/Test/Hls/FileSystem.hs @@ -29,8 +29,12 @@ module Test.Hls.FileSystem , directProjectMulti , simpleCabalProject , simpleCabalProject' + , atomicFileWriteString + , atomicFileWriteStringUTF8 + , atomicFileWriteText ) where +import Control.Exception (onException) import Data.Foldable (traverse_) import qualified Data.Text as T import qualified Data.Text.IO as T @@ -38,6 +42,7 @@ import Development.IDE (NormalizedFilePath) import Language.LSP.Protocol.Types (toNormalizedFilePath) import System.Directory import System.FilePath as FP +import System.IO.Extra (newTempFileWithin, writeFileUTF8) import System.Process.Extra (readProcess) -- ---------------------------------------------------------------------------- @@ -244,3 +249,25 @@ simpleCabalProject' :: [FileTree] -> [FileTree] simpleCabalProject' fps = [ simpleCabalCradle ] <> fps + + +atomicFileWrite :: FilePath -> (FilePath -> IO a) -> IO a +atomicFileWrite targetPath write = do + let dir = takeDirectory targetPath + createDirectoryIfMissing True dir + (tempFilePath, cleanUp) <- newTempFileWithin dir + (write tempFilePath >>= \x -> renameFile tempFilePath targetPath >> pure x) + `onException` cleanUp + + +atomicFileWriteString :: FilePath -> String -> IO () +atomicFileWriteString targetPath content = + atomicFileWrite targetPath (flip writeFile content) + +atomicFileWriteStringUTF8 :: FilePath -> String -> IO () +atomicFileWriteStringUTF8 targetPath content = + atomicFileWrite targetPath (flip writeFileUTF8 content) + +atomicFileWriteText :: FilePath -> T.Text -> IO () +atomicFileWriteText targetPath content = + atomicFileWrite targetPath (flip T.writeFile content) From bd586063b29c67111e227f66251e339b5db6ec91 Mon Sep 17 00:00:00 2001 From: Kevin Brubeck Unhammer Date: Wed, 27 Aug 2025 16:37:42 +0200 Subject: [PATCH 70/87] Document how to set formattingProvider in emacs, modern syntax (#4713) eglot docs recommend using property lists Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- docs/configuration.md | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/docs/configuration.md b/docs/configuration.md index 9da816c09e..2b0eb55e1e 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -443,22 +443,20 @@ This will install `eglot` and enable it by default in `haskell-mode`. To configure `haskell-language-server` we use the `eglot-workspace-configuration` variable. With `M-x eglot-show-workspace-configuration` you can see the JSON that `eglot` will send to `haskell-language-server`. See for more information. -As an example, the setting below will disable the `stan` plugin. 
+As an example, the setting below will disable the `stan` plugin and use `fourmolu` for formatting: ```emacs-lisp (use-package eglot :ensure t :config - (add-hook 'haskell-mode-hook 'eglot-ensure) + (add-hook 'haskell-mode-hook 'eglot-ensure) ; start eglot automatically in haskell projects :config (setq-default eglot-workspace-configuration - '((haskell - (plugin - (stan - (globalOn . :json-false)))))) ;; disable stan + '(:haskell (:plugin (:stan (:globalOn :json-false)) ; disable stan + :formattingProvider "fourmolu"))) ; use fourmolu instead of ormolu :custom - (eglot-autoshutdown t) ;; shutdown language server after closing last file - (eglot-confirm-server-initiated-edits nil) ;; allow edits without confirmation + (eglot-autoshutdown t) ; shutdown language server after closing last file + (eglot-confirm-server-initiated-edits nil) ; allow edits without confirmation ) ``` From 60a6c486aa3d7d108022fa9059d09c7ade5e716e Mon Sep 17 00:00:00 2001 From: Lin Jian Date: Sun, 31 Aug 2025 19:26:28 +0800 Subject: [PATCH 71/87] Replace neat-interpolation with string-interpolate (#4717) https://github.com/haskell/haskell-language-server/pull/4626#discussion_r2311826643 --- hls-test-utils/hls-test-utils.cabal | 2 +- hls-test-utils/src/Test/Hls.hs | 4 +- hls-test-utils/src/Test/Hls/Util.hs | 4 +- plugins/hls-cabal-plugin/test/Completer.hs | 60 +++++----- plugins/hls-cabal-plugin/test/Context.hs | 128 ++++++++++----------- plugins/hls-cabal-plugin/test/Main.hs | 2 +- 6 files changed, 100 insertions(+), 100 deletions(-) diff --git a/hls-test-utils/hls-test-utils.cabal b/hls-test-utils/hls-test-utils.cabal index 084de98534..a7d904e0d0 100644 --- a/hls-test-utils/hls-test-utils.cabal +++ b/hls-test-utils/hls-test-utils.cabal @@ -49,8 +49,8 @@ library , lsp , lsp-test ^>=0.17 , lsp-types ^>=2.3 - , neat-interpolation , safe-exceptions + , string-interpolate , tasty , tasty-expected-failure , tasty-golden diff --git a/hls-test-utils/src/Test/Hls.hs b/hls-test-utils/src/Test/Hls.hs index 1193b2dd19..0ab203fe36 100644 --- a/hls-test-utils/src/Test/Hls.hs +++ b/hls-test-utils/src/Test/Hls.hs @@ -357,7 +357,7 @@ goldenWithDocInTmpDir languageKind config plugin title tree path desc ext act = -- For example: -- -- @ --- parameterisedCursorTest "Cursor Test" [trimming| +-- parameterisedCursorTest "Cursor Test" [__i| -- foo = 2 -- ^ -- bar = 3 @@ -380,7 +380,7 @@ goldenWithDocInTmpDir languageKind config plugin title tree path desc ext act = -- TODO: Many Haskell and Cabal source may contain '^' characters for good reasons. -- We likely need a way to change the character for certain test cases in the future. -- --- The quasi quoter 'trimming' is very helpful to define such tests, as it additionally +-- The quasi quoter '__i' is very helpful to define such tests, as it additionally -- allows to interpolate haskell values and functions. We reexport this quasi quoter -- for easier usage. parameterisedCursorTest :: (Show a, Eq a) => String -> T.Text -> [a] -> (T.Text -> PosPrefixInfo -> IO a) -> TestTree diff --git a/hls-test-utils/src/Test/Hls/Util.hs b/hls-test-utils/src/Test/Hls/Util.hs index 98c795f8e0..cdb3c4de94 100644 --- a/hls-test-utils/src/Test/Hls/Util.hs +++ b/hls-test-utils/src/Test/Hls/Util.hs @@ -47,7 +47,7 @@ module Test.Hls.Util -- * Extract positions from input file. 
, extractCursorPositions , mkParameterisedLabel - , trimming + , __i ) where @@ -81,11 +81,11 @@ import Test.Tasty.ExpectedFailure (expectFailBecause, import Test.Tasty.HUnit (assertFailure) import qualified Data.List as List +import Data.String.Interpolate (__i) import qualified Data.Text.Internal.Search as T import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE.Plugin.Completions.Logic (getCompletionPrefixFromRope) import Development.IDE.Plugin.Completions.Types (PosPrefixInfo (..)) -import NeatInterpolation (trimming) noLiteralCaps :: ClientCapabilities noLiteralCaps = def & L.textDocument ?~ textDocumentCaps diff --git a/plugins/hls-cabal-plugin/test/Completer.hs b/plugins/hls-cabal-plugin/test/Completer.hs index ab7165b1ac..1abaacaacf 100644 --- a/plugins/hls-cabal-plugin/test/Completer.hs +++ b/plugins/hls-cabal-plugin/test/Completer.hs @@ -392,39 +392,39 @@ extract item = case item ^. L.textEdit of _ -> error "" importTestData :: T.Text -importTestData = [trimming| -cabal-version: 3.0 -name: hls-cabal-plugin -version: 0.1.0.0 -synopsis: -homepage: -license: MIT -license-file: LICENSE -author: Fendor -maintainer: fendor@posteo.de -category: Development -extra-source-files: CHANGELOG.md +importTestData = [__i| + cabal-version: 3.0 + name: hls-cabal-plugin + version: 0.1.0.0 + synopsis: + homepage: + license: MIT + license-file: LICENSE + author: Fendor + maintainer: fendor@posteo.de + category: Development + extra-source-files: CHANGELOG.md -common defaults - default-language: GHC2021 - -- Should have been in GHC2021, an oversight - default-extensions: ExplicitNamespaces + common defaults + default-language: GHC2021 + -- Should have been in GHC2021, an oversight + default-extensions: ExplicitNamespaces -common test-defaults - ghc-options: -threaded -rtsopts -with-rtsopts=-N + common test-defaults + ghc-options: -threaded -rtsopts -with-rtsopts=-N -library - import: - ^ - exposed-modules: IDE.Plugin.Cabal - build-depends: base ^>=4.14.3.0 - hs-source-dirs: src - default-language: Haskell2010 + library + import: + ^ + exposed-modules: IDE.Plugin.Cabal + build-depends: base ^>=4.14.3.0 + hs-source-dirs: src + default-language: Haskell2010 -common notForLib - default-language: GHC2021 + common notForLib + default-language: GHC2021 -test-suite tests - import: - ^ + test-suite tests + import: + ^ |] diff --git a/plugins/hls-cabal-plugin/test/Context.hs b/plugins/hls-cabal-plugin/test/Context.hs index 8e6176bc5b..00d13b08f8 100644 --- a/plugins/hls-cabal-plugin/test/Context.hs +++ b/plugins/hls-cabal-plugin/test/Context.hs @@ -217,93 +217,93 @@ getContextTests = -- ------------------------------------------------------------------------ libraryStanzaData :: T.Text -libraryStanzaData = [trimming| -cabal-version: 3.0 -name: simple-cabal -library - default-language: Haskell98 - build-depends: +libraryStanzaData = [__i| + cabal-version: 3.0 + name: simple-cabal + library + default-language: Haskell98 + build-depends: -ma + ma |] executableStanzaData :: T.Text -executableStanzaData = [trimming| -cabal-version: 3.0 -name: simple-cabal -executable exeName - default-language: Haskell2010 - hs-source-dirs: test/preprocessor +executableStanzaData = [__i| + cabal-version: 3.0 + name: simple-cabal + executable exeName + default-language: Haskell2010 + hs-source-dirs: test/preprocessor |] topLevelData :: T.Text -topLevelData = [trimming| -cabal-version: 3.0 -name: +topLevelData = [__i| + cabal-version: 3.0 + name: - eee + eee |] conditionalData :: T.Text -conditionalData = [trimming| 
-cabal-version: 3.0 -name: simple-cabal -library - if os(windows) - buildable: - elif os(linux) - buildable: - else - buildable: +conditionalData = [__i| + cabal-version: 3.0 + name: simple-cabal + library + if os(windows) + buildable: + elif os(linux) + buildable: + else + buildable: |] multiLineOptsData :: T.Text -multiLineOptsData = [trimming| -cabal-version: 3.0 -name: +multiLineOptsData = [__i| + cabal-version: 3.0 + name: -library - build-depends: - base, + library + build-depends: + base, - text , + text , |] multiPositionTestData :: T.Text -multiPositionTestData = [trimming| -cabal-version: 3.4 - ^ ^ -category: Development -^ -name: haskell-language-server -description: - Please see the README on GitHub at +multiPositionTestData = [__i| + cabal-version: 3.4 + ^ ^ + category: Development ^ -extra-source-files: - README.md - ChangeLog.md - test/testdata/**/*.project - test/testdata/**/*.cabal - test/testdata/**/*.yaml - test/testdata/**/*.hs - test/testdata/**/*.json - ^ - -- These globs should only match test/testdata - plugins/**/*.project + name: haskell-language-server + description: + Please see the README on GitHub at + ^ + extra-source-files: + README.md + ChangeLog.md + test/testdata/**/*.project + test/testdata/**/*.cabal + test/testdata/**/*.yaml + test/testdata/**/*.hs + test/testdata/**/*.json + ^ + -- These globs should only match test/testdata + plugins/**/*.project -source-repository head - ^ ^ ^ - type: git - ^ ^ ^ ^ - location: https://github.com/haskell/haskell-language-server + source-repository head + ^ ^ ^ + type: git + ^ ^ ^ ^ + location: https://github.com/haskell/haskell-language-server - ^ -common cabalfmt + ^ + common cabalfmt - ^ - build-depends: haskell-language-server:hls-cabal-fmt-plugin - ^ ^ - cpp-options: -Dhls_cabalfmt + ^ + build-depends: haskell-language-server:hls-cabal-fmt-plugin + ^ ^ + cpp-options: -Dhls_cabalfmt |] diff --git a/plugins/hls-cabal-plugin/test/Main.hs b/plugins/hls-cabal-plugin/test/Main.hs index 43794e753d..5570598a37 100644 --- a/plugins/hls-cabal-plugin/test/Main.hs +++ b/plugins/hls-cabal-plugin/test/Main.hs @@ -292,7 +292,7 @@ reloadOnCabalChangeTests = testGroup "Reload on .cabal changes" cabalDoc <- openDoc "simple-reload.cabal" "cabal" skipManyTill anyMessage cabalKickDone saveDoc cabalDoc - [trimming| + [__i| cabal-version: 3.4 name: simple-reload version: 0.1.0.0 From 1263b9f843c33728a6a64435f06815186bbed82a Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 1 Sep 2025 23:31:22 +0800 Subject: [PATCH 72/87] Refactor garbage collection logic and improve logging in Shake module --- ghcide/src/Development/IDE/Core/FileStore.hs | 2 +- ghcide/src/Development/IDE/Core/OfInterest.hs | 13 +- .../Development/IDE/Core/ProgressReporting.hs | 45 ++- ghcide/src/Development/IDE/Core/Shake.hs | 286 +++++++++++------- ghcide/src/Development/IDE/Plugin/Test.hs | 5 - ghcide/src/Development/IDE/Types/Action.hs | 10 +- .../src/Development/IDE/Graph/Database.hs | 6 +- .../IDE/Graph/Internal/Database.hs | 87 +++++- .../Development/IDE/Graph/Internal/Types.hs | 6 + hls-graph/test/ActionSpec.hs | 9 +- hls-test-utils/src/Development/IDE/Test.hs | 5 +- 11 files changed, 329 insertions(+), 145 deletions(-) diff --git a/ghcide/src/Development/IDE/Core/FileStore.hs b/ghcide/src/Development/IDE/Core/FileStore.hs index eb7b459d93..2cde86713f 100644 --- a/ghcide/src/Development/IDE/Core/FileStore.hs +++ b/ghcide/src/Development/IDE/Core/FileStore.hs @@ -297,7 +297,7 @@ typecheckParentsAction recorder nfp = do case revs of Nothing -> logWith recorder 
Info $ LogCouldNotIdentifyReverseDeps nfp Just rs -> do - logWith recorder Info $ LogTypeCheckingReverseDeps nfp revs + logWith recorder L.Debug $ LogTypeCheckingReverseDeps nfp revs void $ uses GetModIface rs -- | Note that some keys have been modified and restart the session diff --git a/ghcide/src/Development/IDE/Core/OfInterest.hs b/ghcide/src/Development/IDE/Core/OfInterest.hs index 19e0f40e24..abdc224898 100644 --- a/ghcide/src/Development/IDE/Core/OfInterest.hs +++ b/ghcide/src/Development/IDE/Core/OfInterest.hs @@ -57,10 +57,6 @@ instance Pretty Log where pretty = \case LogShake msg -> pretty msg -newtype OfInterestVar = OfInterestVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) - -instance IsIdeGlobal OfInterestVar - -- | The rule that initialises the files of interest state. ofInterestRules :: Recorder (WithPriority Log) -> Rules () ofInterestRules recorder = do @@ -79,9 +75,6 @@ ofInterestRules recorder = do summarize (IsFOI (Modified False)) = BS.singleton 2 summarize (IsFOI (Modified True)) = BS.singleton 3 ------------------------------------------------------------- -newtype GarbageCollectVar = GarbageCollectVar (Var Bool) -instance IsIdeGlobal GarbageCollectVar ------------------------------------------------------------ -- Exposed API @@ -154,10 +147,6 @@ kick = do liftIO $ progressUpdate progress ProgressCompleted - GarbageCollectVar var <- getIdeGlobalAction - garbageCollectionScheduled <- liftIO $ readVar var - when garbageCollectionScheduled $ do - void garbageCollectDirtyKeys - liftIO $ writeVar var False signal (Proxy @"kick/done") + diff --git a/ghcide/src/Development/IDE/Core/ProgressReporting.hs b/ghcide/src/Development/IDE/Core/ProgressReporting.hs index 3d8a2bf989..20dfbe9e69 100644 --- a/ghcide/src/Development/IDE/Core/ProgressReporting.hs +++ b/ghcide/src/Development/IDE/Core/ProgressReporting.hs @@ -39,7 +39,11 @@ import Language.LSP.Server (ProgressAmount (..), ProgressCancellable (..), withProgress) import qualified Language.LSP.Server as LSP +import qualified ListT as L import qualified StmContainers.Map as STM +import qualified StmContainers.Set as S +import qualified StmContainers.Set as Set +import StmContainers.Set import UnliftIO (Async, async, bracket, cancel) data ProgressEvent @@ -124,24 +128,25 @@ updateState _ StopProgress st = pure st data InProgressState = InProgressState { -- | Number of files to do - todoVar :: TVar Int, + todoVar :: TVar Int, -- | Number of files done - doneVar :: TVar Int, - currentVar :: STM.Map NormalizedFilePath Int + doneVar :: TVar Int, + currentVar :: STM.Map NormalizedFilePath Int, + workingFileVar :: S.Set NormalizedFilePath } newInProgress :: IO InProgressState -newInProgress = InProgressState <$> newTVarIO 0 <*> newTVarIO 0 <*> STM.newIO +newInProgress = InProgressState <$> newTVarIO 0 <*> newTVarIO 0 <*> STM.newIO <*> newIO recordProgress :: InProgressState -> NormalizedFilePath -> (Int -> Int) -> IO () recordProgress InProgressState {..} file shift = do (prev, new) <- atomicallyNamed "recordProgress" $ STM.focus alterPrevAndNew file currentVar atomicallyNamed "recordProgress2" $ case (prev, new) of (Nothing, 0) -> modifyTVar' doneVar (+ 1) >> modifyTVar' todoVar (+ 1) - (Nothing, _) -> modifyTVar' todoVar (+ 1) + (Nothing, _) -> modifyTVar' todoVar (+ 1) >> S.insert file workingFileVar (Just 0, 0) -> pure () (Just 0, _) -> modifyTVar' doneVar pred - (Just _, 0) -> modifyTVar' doneVar (+ 1) + (Just _, 0) -> modifyTVar' doneVar (+ 1) >> S.delete file workingFileVar (Just _, _) -> pure () where 
alterPrevAndNew = do @@ -158,16 +163,18 @@ recordProgress InProgressState {..} file shift = do progressReportingNoTrace :: STM Int -> STM Int -> + STM (Maybe T.Text)-> Maybe (LSP.LanguageContextEnv c) -> T.Text -> ProgressReportingStyle -> IO ProgressReporting -progressReportingNoTrace _ _ Nothing _title _optProgressStyle = return noProgressReporting -progressReportingNoTrace todo done (Just lspEnv) title optProgressStyle = do +progressReportingNoTrace _ _ _ Nothing _title _optProgressStyle = return noProgressReporting +progressReportingNoTrace todo done mf (Just lspEnv) title optProgressStyle = do progressState <- newVar NotStarted - let _progressUpdate event = liftIO $ updateStateVar $ Event event + let _progressUpdate event = do + liftIO $ updateStateVar $ Event event _progressStop = updateStateVar StopProgress - updateStateVar = modifyVar_ progressState . updateState (progressCounter lspEnv title optProgressStyle todo done) + updateStateVar = modifyVar_ progressState . updateState (progressCounter lspEnv title optProgressStyle todo done mf) return ProgressReporting {..} -- | `progressReporting` initiates a new progress reporting session. @@ -182,12 +189,18 @@ progressReporting Nothing _title _optProgressStyle = noPerFileProgressReporting progressReporting (Just lspEnv) title optProgressStyle = do inProgressState <- newInProgress progressReportingInner <- progressReportingNoTrace (readTVar $ todoVar inProgressState) - (readTVar $ doneVar inProgressState) (Just lspEnv) title optProgressStyle + (readTVar $ doneVar inProgressState) (getFile $ workingFileVar inProgressState) (Just lspEnv) title optProgressStyle let inProgress :: NormalizedFilePath -> IO a -> IO a inProgress = updateStateForFile inProgressState return PerFileProgressReporting {..} where + getFile :: Set.Set NormalizedFilePath -> STM (Maybe T.Text) + getFile set = do + let lst = S.listT set + x <- L.head lst + return (T.pack . fromNormalizedFilePath <$> x) + updateStateForFile inProgress file = UnliftIO.bracket (liftIO $ f succ) (const $ liftIO $ f pred) . const where -- This functions are deliberately eta-expanded to avoid space leaks. 
@@ -203,23 +216,25 @@ progressCounter :: ProgressReportingStyle -> STM Int -> STM Int -> + STM (Maybe T.Text)-> IO () -progressCounter lspEnv title optProgressStyle getTodo getDone = +progressCounter lspEnv title optProgressStyle getTodo getDone mf = LSP.runLspT lspEnv $ withProgress title Nothing NotCancellable $ \update -> loop update 0 where loop _ _ | optProgressStyle == NoProgress = forever $ liftIO $ threadDelay maxBound loop update prevPct = do - (todo, done, nextPct) <- liftIO $ atomically $ do + (todo, done, nextPct,file) <- liftIO $ atomically $ do todo <- getTodo done <- getDone + file <- mf let nextFrac :: Double nextFrac = if todo == 0 then 0 else fromIntegral done / fromIntegral todo nextPct :: UInt nextPct = floor $ 100 * nextFrac when (nextPct == prevPct) retry - pure (todo, done, nextPct) + pure (todo, done, nextPct, file) - _ <- update (ProgressAmount (Just nextPct) (Just $ T.pack $ show done <> "/" <> show todo)) + _ <- update (ProgressAmount (Just nextPct) (Just $ (T.pack $ show done) <> "/" <> (T.pack $ show todo) <> maybe mempty (":" <>) file)) loop update nextPct mRunLspT :: (Applicative m) => Maybe (LSP.LanguageContextEnv c) -> LSP.LspT c m () -> m () diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs index 60bfd6f165..53c9939ab2 100644 --- a/ghcide/src/Development/IDE/Core/Shake.hs +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -24,7 +24,9 @@ module Development.IDE.Core.Shake( IdeState, shakeSessionInit, shakeExtras, shakeDb, rootDir, ShakeExtras(..), getShakeExtras, getShakeExtrasRules, - KnownTargets(..), Target(..), toKnownFiles, unionKnownTargets, mkKnownTargets, + KnownTargets(..), Target(..), GarbageCollectVar(..), + OfInterestVar(..), + toKnownFiles, unionKnownTargets, mkKnownTargets, IdeRule, IdeResult, GetModificationTime(GetModificationTime, GetModificationTime_, missingFileDiagnostics), shakeOpen, shakeShut, @@ -71,8 +73,6 @@ module Development.IDE.Core.Shake( HieDb, HieDbWriter(..), addPersistentRule, - garbageCollectDirtyKeys, - garbageCollectDirtyKeysOlderThan, Log(..), VFSModified(..), getClientConfigAction, ThreadQueue(..), @@ -81,104 +81,119 @@ module Development.IDE.Core.Shake( import Control.Concurrent.Async import Control.Concurrent.STM -import Control.Concurrent.STM.Stats (atomicallyNamed) +import Control.Concurrent.STM.Stats (atomicallyNamed) import Control.Concurrent.Strict import Control.DeepSeq -import Control.Exception.Extra hiding (bracket_) -import Control.Lens ((%~), (&), (?~)) +import Control.Exception.Extra hiding (bracket_) +import Control.Lens ((%~), (&), (?~)) import Control.Monad.Extra import Control.Monad.IO.Class import Control.Monad.Reader import Control.Monad.Trans.Maybe -import Data.Aeson (Result (Success), - toJSON) -import qualified Data.Aeson.Types as A -import qualified Data.ByteString.Char8 as BS -import qualified Data.ByteString.Char8 as BS8 -import Data.Coerce (coerce) +import Data.Aeson (Result (Success), + toJSON) +import qualified Data.Aeson.Types as A +import qualified Data.ByteString.Char8 as BS +import qualified Data.ByteString.Char8 as BS8 +import Data.Coerce (coerce) import Data.Default import Data.Dynamic -import Data.EnumMap.Strict (EnumMap) -import qualified Data.EnumMap.Strict as EM -import Data.Foldable (find, for_) -import Data.Functor ((<&>)) +import Data.EnumMap.Strict (EnumMap) +import qualified Data.EnumMap.Strict as EM +import Data.Foldable (find, for_) +import Data.Functor ((<&>)) import Data.Functor.Identity import Data.Hashable -import qualified 
Data.HashMap.Strict as HMap -import Data.HashSet (HashSet) -import qualified Data.HashSet as HSet -import Data.List.Extra (foldl', partition, - takeEnd) -import qualified Data.Map.Strict as Map +import qualified Data.HashMap.Strict as HMap +import Data.HashSet (HashSet) +import qualified Data.HashSet as HSet +import Data.List.Extra (foldl', partition, + takeEnd) +import qualified Data.Map.Strict as Map import Data.Maybe -import qualified Data.SortedList as SL -import Data.String (fromString) -import qualified Data.Text as T +import qualified Data.SortedList as SL +import Data.String (fromString) +import qualified Data.Text as T import Data.Time import Data.Traversable import Data.Tuple.Extra import Data.Typeable import Data.Unique -import Data.Vector (Vector) -import qualified Data.Vector as Vector +import Data.Vector (Vector) +import qualified Data.Vector as Vector import Development.IDE.Core.Debouncer -import Development.IDE.Core.FileUtils (getModTime) +import Development.IDE.Core.FileUtils (getModTime) import Development.IDE.Core.PositionMapping import Development.IDE.Core.ProgressReporting import Development.IDE.Core.RuleTypes -import Development.IDE.Types.Options as Options -import qualified Language.LSP.Protocol.Message as LSP -import qualified Language.LSP.Server as LSP - -import Control.Concurrent (threadDelay) +import Development.IDE.Types.Options as Options +import qualified Language.LSP.Protocol.Message as LSP +import qualified Language.LSP.Server as LSP + +import Control.Concurrent (threadDelay) +import Control.Concurrent.Extra (readVar) +import Control.Monad (forever) +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HashMap +import Data.Int (Int64) +import Data.IORef.Extra (atomicModifyIORef'_, + readIORef) +import Data.Text.Encoding (encodeUtf8) import Development.IDE.Core.Tracing import Development.IDE.Core.WorkerThread -import Development.IDE.GHC.Compat (NameCache, - NameCacheUpdater, - initNameCache, - knownKeyNames) -import Development.IDE.GHC.Orphans () -import Development.IDE.Graph hiding (ShakeValue, - action) -import qualified Development.IDE.Graph as Shake -import Development.IDE.Graph.Database (ShakeDatabase, - shakeGetBuildStep, - shakeGetDatabaseKeys, - shakeNewDatabase, - shakeProfileDatabase, - shakeRunDatabaseForKeys) +import Development.IDE.GHC.Compat (NameCache, + NameCacheUpdater, + initNameCache, + knownKeyNames) +import Development.IDE.GHC.Orphans () +import Development.IDE.Graph hiding (ShakeValue, + action) +import qualified Development.IDE.Graph as Shake +import Development.IDE.Graph.Database (ShakeDatabase, + shakeGetBuildStep, + shakeGetDatabaseKeys, + shakeNewDatabase, + shakeProfileDatabase, + shakeRunDatabaseForKeys) +import Development.IDE.Graph.Internal.Database (garbageCollectKeys, + garbageCollectKeys1) +import Development.IDE.Graph.Internal.Types (Database) import Development.IDE.Graph.Rule import Development.IDE.Types.Action +import Development.IDE.Types.Action (isActionQueueEmpty) import Development.IDE.Types.Diagnostics -import Development.IDE.Types.Exports hiding (exportsMapSize) -import qualified Development.IDE.Types.Exports as ExportsMap +import Development.IDE.Types.Exports hiding (exportsMapSize) +import qualified Development.IDE.Types.Exports as ExportsMap import Development.IDE.Types.KnownTargets import Development.IDE.Types.Location -import Development.IDE.Types.Monitoring (Monitoring (..)) +import Development.IDE.Types.Monitoring (Monitoring (..)) import Development.IDE.Types.Shake import qualified 
Focus +import GHC.Base (undefined) import GHC.Fingerprint -import GHC.Stack (HasCallStack) -import GHC.TypeLits (KnownSymbol) +import GHC.Stack (HasCallStack) +import GHC.TypeLits (KnownSymbol) import HieDb.Types -import Ide.Logger hiding (Priority) -import qualified Ide.Logger as Logger +import Ide.Logger hiding (Priority) +import qualified Ide.Logger as Logger import Ide.Plugin.Config -import qualified Ide.PluginUtils as HLS +import qualified Ide.PluginUtils as HLS import Ide.Types -import qualified Language.LSP.Protocol.Lens as L +import Ide.Types (CheckParents (CheckOnSave)) +import qualified Language.LSP.Protocol.Lens as L import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types -import qualified Language.LSP.Protocol.Types as LSP -import Language.LSP.VFS hiding (start) +import qualified Language.LSP.Protocol.Types as LSP +import Language.LSP.VFS hiding (start) import qualified "list-t" ListT -import OpenTelemetry.Eventlog hiding (addEvent) -import qualified Prettyprinter as Pretty -import qualified StmContainers.Map as STM -import System.FilePath hiding (makeRelative) -import System.IO.Unsafe (unsafePerformIO) +import OpenTelemetry.Eventlog hiding (addEvent) +import qualified Prettyprinter as Pretty +import qualified StmContainers.Map as STM +import System.FilePath hiding (makeRelative) +import System.IO.Unsafe (unsafePerformIO) import System.Time.Extra -import UnliftIO (MonadUnliftIO (withRunInIO)) +import UnliftIO (MonadUnliftIO (withRunInIO), + newIORef) data Log @@ -194,13 +209,16 @@ data Log | LogCancelledAction !T.Text | LogSessionInitialised | LogLookupPersistentKey !T.Text - | LogShakeGarbageCollection !T.Text !Int !Seconds + | LogShakeGarbageCollection !T.Text ![Key] !Seconds -- * OfInterest Log messages | LogSetFilesOfInterest ![(NormalizedFilePath, FileOfInterestStatus)] + | LogMonitering !T.Text !Int64 deriving Show instance Pretty Log where pretty = \case + LogMonitering name value -> + "Monitoring:" <+> pretty name <+> "value:" <+> pretty value LogCreateHieDbExportsMapStart -> "Initializing exports map from hiedb" LogCreateHieDbExportsMapFinish exportsMapSize -> @@ -235,8 +253,10 @@ instance Pretty Log where LogSessionInitialised -> "Shake session initialized" LogLookupPersistentKey key -> "LOOKUP PERSISTENT FOR:" <+> pretty key - LogShakeGarbageCollection label number duration -> - pretty label <+> "of" <+> pretty number <+> "keys (took " <+> pretty (showDuration duration) <> ")" + LogShakeGarbageCollection label victims duration -> + "ShakeGarbageCollect" <+> pretty (showDuration duration) <+> ", reson" <+> pretty label + <+> "removed" <+> pretty (length victims) <+> "keys" + <> hang 2 (pretty (map show victims)) LogSetFilesOfInterest ofInterest -> "Set files of interst to" <> Pretty.line <> indent 4 (pretty $ fmap (first fromNormalizedFilePath) ofInterest) @@ -388,6 +408,8 @@ addPersistentRule k getVal = do void $ liftIO $ atomically $ modifyTVar' persistentKeys $ insertKeyMap (newKey k) (fmap (fmap (first3 toDyn)) . 
getVal) class Typeable a => IsIdeGlobal a where +newtype OfInterestVar = OfInterestVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) +instance IsIdeGlobal OfInterestVar -- | Read a virtual file from the current snapshot getVirtualFile :: NormalizedFilePath -> Action (Maybe VirtualFile) @@ -658,7 +680,7 @@ shakeOpen :: Recorder (WithPriority Log) shakeOpen recorder lspEnv defaultConfig idePlugins debouncer shakeProfileDir (IdeReportProgress reportProgress) ideTesting - withHieDb threadQueue opts monitoring rules rootDir = mdo + withHieDb threadQueue opts argMonitoring rules rootDir = mdo -- see Note [Serializing runs in separate thread] let indexQueue = tIndexQueue threadQueue restartQueue = tRestartQueue threadQueue @@ -680,8 +702,8 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer indexCompleted <- newTVarIO 0 semanticTokensId <- newTVarIO 0 indexProgressReporting <- progressReportingNoTrace - (liftM2 (+) (length <$> readTVar indexPending) (readTVar indexCompleted)) - (readTVar indexCompleted) + (liftM2 (+) (length <$> readTVar indexPending) (readTVar indexCompleted) ) + (readTVar indexCompleted) (pure $ Nothing) lspEnv "Indexing" optProgressStyle let hiedbWriter = HieDbWriter{..} exportsMap <- newTVarIO mempty @@ -717,6 +739,8 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer checkParents <- optCheckParents + logMonitoring <- newLogMonitoring recorder + let monitoring = logMonitoring <> argMonitoring -- monitoring let readValuesCounter = fromIntegral . countRelevantKeys checkParents <$> getStateKeys shakeExtras readDirtyKeys = return 0 @@ -724,6 +748,9 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer readExportsMap = fromIntegral . ExportsMap.exportsMapSize <$> readTVarIO (exportsMap shakeExtras) readDatabaseCount = fromIntegral . countRelevantKeys checkParents . map fst <$> shakeGetDatabaseKeys shakeDb readDatabaseStep = fromIntegral <$> shakeGetBuildStep shakeDb + readNumActionsRunning = fromIntegral . length <$> atomically (peekInProgress $ actionQueue shakeExtras) + readIsActionQueueEmpty = let boolToInt b = if b then 1 else 0 + in boolToInt <$> atomically (isActionQueueEmpty $ actionQueue shakeExtras) registerGauge monitoring "ghcide.values_count" readValuesCounter registerGauge monitoring "ghcide.dirty_keys_count" readDirtyKeys @@ -731,12 +758,30 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer registerGauge monitoring "ghcide.exports_map_count" readExportsMap registerGauge monitoring "ghcide.database_count" readDatabaseCount registerCounter monitoring "ghcide.num_builds" readDatabaseStep + registerCounter monitoring "ghcide.num_actions_runnning" readNumActionsRunning + registerCounter monitoring "ghcide.isActionQueueEmpty" readIsActionQueueEmpty stopMonitoring <- start monitoring let ideState = IdeState{..} return ideState +newLogMonitoring :: MonadIO m => Recorder (WithPriority Log) -> m Monitoring +newLogMonitoring logger = do + actions <- newIORef [] + let registerCounter name readA = do + let update = do + val <- readA + logWith logger Info $ LogMonitering name (fromIntegral val) + atomicModifyIORef'_ actions (update :) + registerGauge = registerCounter + let start = do + a <- regularly 10 $ sequence_ =<< readIORef actions + return (cancel a) + return Monitoring{..} + where + regularly :: Seconds -> IO () -> IO (Async ()) + regularly delay act = async $ forever (act >> sleep delay) getStateKeys :: ShakeExtras -> IO [Key] getStateKeys = (fmap.fmap) fst . atomically . ListT.toList . STM.listT . 
stateValues @@ -829,7 +874,7 @@ shakeEnqueue ShakeExtras{actionQueue, shakeRecorder} act = do atomicallyNamed "actionQueue - push" $ pushQueue dai actionQueue let wait' barrier = waitBarrier barrier `catches` - [ Handler(\BlockedIndefinitelyOnMVar -> + [ Handler (\BlockedIndefinitelyOnMVar -> fail $ "internal bug: forever blocked on MVar for " <> actionName act) , Handler (\e@AsyncCancelled -> do @@ -842,6 +887,17 @@ shakeEnqueue ShakeExtras{actionQueue, shakeRecorder} act = do data VFSModified = VFSUnmodified | VFSModified !VFS +------------------------------------------------------------ +newtype GarbageCollectVar = GarbageCollectVar (Var Bool) +instance IsIdeGlobal GarbageCollectVar + + +getFilesOfInterest :: ShakeExtras -> IO [NormalizedFilePath] +getFilesOfInterest state = do + OfInterestVar var <- getIdeGlobalExtras state + mm <- readVar var + return $ map fst $ HashMap.toList mm + -- | Set up a new 'ShakeSession' with a set of initial actions -- Will crash if there is an existing 'ShakeSession' running. -- newSession @@ -860,6 +916,8 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason newDirtyKe VFSModified vfs -> atomically $ writeTVar vfsVar vfs IdeOptions{optRunSubset} <- getIdeOptionsIO extras + isActionQueueEmpty <- fmap ((&&) (null acts)) $ atomicallyNamed "actionQueue - is empty" $ isActionQueueEmpty actionQueue + reenqueued <- atomicallyNamed "actionQueue - peek" $ peekInProgress actionQueue let -- A daemon-like action used to inject additional work @@ -867,21 +925,6 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason newDirtyKe pumpActionThread otSpan = do d <- liftIO $ atomicallyNamed "action queue - pop" $ popQueue actionQueue actionFork (run otSpan d) $ \_ -> pumpActionThread otSpan - -- - -- garbageCollect = do - -- previousNumber <- countQueue actionQueue - -- liftIO $ threadDelay 2_000_000 - -- currentNumber <- countQueue actionQueue - -- if previousNumber + currentNumber == 0 - -- then do - -- logWith recorder Debug LogGarbageCollectingActionQueue - -- -- If the queue is empty, we can garbage collect it - -- -- This will remove all actions that are not running - -- atomicallyNamed "actionQueue - garbage collect" $ garbageCollectQueue actionQueue - -- else do - -- logWith recorder Debug LogGarbageCollectingActionQueueSkipped - - -- TODO figure out how to thread the otSpan into defineEarlyCutoff run _otSpan d = do @@ -900,6 +943,35 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason newDirtyKe let keysActs = pumpActionThread otSpan : map (run otSpan) (reenqueued ++ acts) res <- try @SomeException $ restore $ shakeRunDatabaseForKeys (if optRunSubset then newDirtyKeys else Nothing) shakeDb keysActs + -- We only do garbage collection if the action queue is empty + -- and if it has been scheduled + $ \db -> do + GarbageCollectVar var <- getIdeGlobalExtras extras + -- checkParentsOpt <- optCheckParents =<< getIdeOptionsIO extras + isGarbageCollectionScheduled <- readVar var + when (isActionQueueEmpty && isGarbageCollectionScheduled) $ do + -- reset garbage collection flag + liftIO $ writeVar var False + start <- offsetTime + -- todo do not remove keys that have FOI as its reverse deps. + -- top level + foiFiles <- getFilesOfInterest extras + -- We find a list of keys that are FOI and their dependencies, + -- and mark them as "needed". Then we delete all dirty keys not marked as needed. 
+ let isFoiRules :: Key -> Bool + isFoiRules k = case fromKeyType k of + Just (_, path) | path `elem` foiFiles || path == "" -> True + _ -> False + + -- victims <- garbageCollectKeys db (isRelevantKey checkParentsOpt) + victims <- garbageCollectKeys1 db isFoiRules + -- also remove the keys from the stateValues map + (mapM_ $ \k -> STM.focus Focus.delete k stateValues) + when (coerce ideTesting) $ liftIO $ mRunLspT lspEnv $ + LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/GC")) + (toJSON $ mapMaybe (fmap (show . Q) . fromKeyType) victims) + runTime <- liftIO start + logWith recorder Info $ LogShakeGarbageCollection (T.pack reason) victims runTime return $ do let exception = case res of @@ -954,24 +1026,26 @@ getHiddenDiagnostics IdeState{shakeExtras = ShakeExtras{hiddenDiagnostics}} = do -- * position mapping store -- * indexing queue -- * exports map -garbageCollectDirtyKeys :: Action [Key] -garbageCollectDirtyKeys = do - IdeOptions{optCheckParents} <- getIdeOptions - checkParents <- liftIO optCheckParents - garbageCollectDirtyKeysOlderThan 0 checkParents - -garbageCollectDirtyKeysOlderThan :: Int -> CheckParents -> Action [Key] -garbageCollectDirtyKeysOlderThan maxAge checkParents = otTracedGarbageCollection "dirty GC" $ do - return [] - -- dirtySet <- getDirtySet - -- garbageCollectKeys "dirty GC" maxAge checkParents dirtySet - --- garbageCollectKeys :: String -> Int -> CheckParents -> [(Key, Int)] -> Action [Key] --- garbageCollectKeys label maxAge checkParents agedKeys = do +-- garbageCollectDirtyKeys :: Action [Key] +-- garbageCollectDirtyKeys = do +-- IdeOptions{optCheckParents} <- getIdeOptions +-- checkParents <- liftIO optCheckParents +-- garbageCollectDirtyKeysOlderThan TVar KeySet 0 checkParents + +-- garbageCollectDirtyKeysOlderThan :: TVar KeySet -> Int -> CheckParents -> Action [Key] +-- garbageCollectDirtyKeysOlderThan dirtyKeys maxAge checkParents = otTracedGarbageCollection "dirty GC" $ do +-- dirtySet <- getDirtySet +-- garbageCollectKeys dirtyKeys "dirty GC" maxAge checkParents dirtySet + +-- garbageCollectKeys :: Database -> String -> Int -> CheckParents -> [(Key, Int)] -> Action [Key] +-- garbageCollectKeys = undefined +-- garbageCollectKeys dirtyKeys label maxAge checkParents agedKeys = do -- start <- liftIO offsetTime --- ShakeExtras{state, lspEnv, shakeRecorder, ideTesting} <- getShakeExtras +-- ShakeExtras{stateValues, lspEnv, shakeRecorder, ideTesting} <- getShakeExtras -- (n::Int, garbage) <- liftIO $ --- foldM (removeDirtyKey dirtyKeys state) (0,[]) agedKeys +-- foldM (removeDirtyKey dirtyKeys stateValues) (0,[]) agedKeys +-- -- let n = 0 +-- -- let garbage = [] -- t <- liftIO start -- when (n>0) $ liftIO $ do -- logWith shakeRecorder Debug $ LogShakeGarbageCollection (T.pack label) n t @@ -997,8 +1071,15 @@ countRelevantKeys :: CheckParents -> [Key] -> Int countRelevantKeys checkParents = Prelude.length . filter (maybe False (not . (`HSet.member` preservedKeys checkParents) . fst) . fromKeyType) +-- A key is relevant if it is not in the preserved set +-- i.e. it is a key that can be garbage collected +isRelevantKey :: CheckParents -> Key -> Bool +isRelevantKey p k = maybe False (not . (`HSet.member` preservedKeys p) . 
fst) (fromKeyType k) + preservedKeys :: CheckParents -> HashSet TypeRep preservedKeys checkParents = HSet.fromList $ + -- always preserved + -- always preserved -- always preserved [ typeOf GetFileExists , typeOf GetModificationTime @@ -1161,6 +1242,7 @@ usesWithStale key files = do traverse (lastValue key) files -- we use separate fingerprint rules to trigger the rebuild of the rule +-- fingerKey should depend on the key, so we can use it to trigger a rebuild useWithSeparateFingerprintRule :: (IdeRule k v, IdeRule k1 Fingerprint) => k1 -> k -> NormalizedFilePath -> Action (Maybe v) diff --git a/ghcide/src/Development/IDE/Plugin/Test.hs b/ghcide/src/Development/IDE/Plugin/Test.hs index d99f61ddfd..a16f1a9781 100644 --- a/ghcide/src/Development/IDE/Plugin/Test.hs +++ b/ghcide/src/Development/IDE/Plugin/Test.hs @@ -53,7 +53,6 @@ import qualified "list-t" ListT import qualified StmContainers.Map as STM import System.Time.Extra -type Age = Int data TestRequest = BlockSeconds Seconds -- ^ :: Null | GetInterfaceFilesDir Uri -- ^ :: String @@ -64,7 +63,6 @@ data TestRequest | GetBuildKeysBuilt -- ^ :: [(String] | GetBuildKeysChanged -- ^ :: [(String] | GetBuildEdgesCount -- ^ :: Int - | GarbageCollectDirtyKeys CheckParents Age -- ^ :: [String] (list of keys collected) | GetStoredKeys -- ^ :: [String] (list of keys in store) | GetFilesOfInterest -- ^ :: [FilePath] | GetRebuildsCount -- ^ :: Int (number of times we recompiled with GHC) @@ -126,9 +124,6 @@ testRequestHandler s GetBuildKeysVisited = liftIO $ do testRequestHandler s GetBuildEdgesCount = liftIO $ do count <- shakeGetBuildEdges $ shakeDb s return $ Right $ toJSON count -testRequestHandler s (GarbageCollectDirtyKeys parents age) = do - res <- liftIO $ runAction "garbage collect dirty" s $ garbageCollectDirtyKeysOlderThan age parents - return $ Right $ toJSON $ map show res testRequestHandler s GetStoredKeys = do keys <- liftIO $ atomically $ map fst <$> ListT.toList (STM.listT $ stateValues $ shakeExtras s) return $ Right $ toJSON $ map show keys diff --git a/ghcide/src/Development/IDE/Types/Action.hs b/ghcide/src/Development/IDE/Types/Action.hs index 0aedd1d0da..225f5b603d 100644 --- a/ghcide/src/Development/IDE/Types/Action.hs +++ b/ghcide/src/Development/IDE/Types/Action.hs @@ -7,7 +7,9 @@ module Development.IDE.Types.Action popQueue, doneQueue, peekInProgress, - abortQueue,countQueue) + abortQueue, + countQueue, + isActionQueueEmpty) where import Control.Concurrent.STM @@ -86,3 +88,9 @@ countQueue ActionQueue{..} = do peekInProgress :: ActionQueue -> STM [DelayedActionInternal] peekInProgress ActionQueue {..} = Set.toList <$> readTVar inProgress + +isActionQueueEmpty :: ActionQueue -> STM Bool +isActionQueueEmpty ActionQueue {..} = do + emptyQueue <- isEmptyTQueue newActions + inProg <- Set.null <$> readTVar inProgress + return (emptyQueue && inProg) diff --git a/hls-graph/src/Development/IDE/Graph/Database.hs b/hls-graph/src/Development/IDE/Graph/Database.hs index bd8601cd16..902fe031d5 100644 --- a/hls-graph/src/Development/IDE/Graph/Database.hs +++ b/hls-graph/src/Development/IDE/Graph/Database.hs @@ -34,7 +34,7 @@ shakeNewDatabase opts rules = do pure $ ShakeDatabase (length actions) actions db shakeRunDatabase :: ShakeDatabase -> [Action a] -> IO [a] -shakeRunDatabase = shakeRunDatabaseForKeys Nothing +shakeRunDatabase a b = shakeRunDatabaseForKeys Nothing a b (const $ pure ()) -- | Returns the set of dirty keys annotated with their age (in # of builds) shakeGetDirtySet :: ShakeDatabase -> IO [(Key, Int)] @@ -57,9 +57,11 @@ 
shakeRunDatabaseForKeys -- ^ Set of keys changed since last run. 'Nothing' means everything has changed -> ShakeDatabase -> [Action a] + -> (Database -> IO ()) -> IO [a] -shakeRunDatabaseForKeys keysChanged (ShakeDatabase lenAs1 as1 db) as2 = do +shakeRunDatabaseForKeys keysChanged (ShakeDatabase lenAs1 as1 db) as2 garbageCollect = do incDatabase db keysChanged + garbageCollect db fmap (drop lenAs1) $ runActions db $ map unvoid as1 ++ as2 -- | Given a 'ShakeDatabase', write an HTML profile to the given file about the latest run. diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs index afe985bd88..78bf196572 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs @@ -8,7 +8,7 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} -module Development.IDE.Graph.Internal.Database (compute, newDatabase, incDatabase, build, getDirtySet, getKeysAndVisitAge) where +module Development.IDE.Graph.Internal.Database (compute, newDatabase, incDatabase, build, getDirtySet, getKeysAndVisitAge, incDatabaseRaw, garbageCollectKeys, garbageCollectKeys1) where import Prelude hiding (unzip) @@ -57,6 +57,72 @@ newDatabase databaseExtra databaseRules = do databaseDirtyKeys <- atomically SSet.new pure Database{..} +incDatabaseRaw :: Database -> IO () +incDatabaseRaw db = + atomicallyNamed "incDatabaseRaw" $ do + modifyTVar' (databaseStep db) $ \(Step i) -> Step $ i + 1 + +garbageCollectKeys1 :: Database -> (Key -> Bool) -> ([Key] -> STM ()) -> IO [Key] +garbageCollectKeys1 db pred garbageCollectHook = do + -- GC policy: + -- We find a list of keys that are FOI and their dependencies, + -- and mark them as "needed". Then we delete all dirty keys not marked as needed. + let maxAge = 0 -- builds; tune as needed or make configurable upstream + -- on idle but still dirty keys + ks <- getKeysAndVisitAge db + let foiks = [ k | (k, _) <- ks, pred k ] + toKeep <- atomically $ transitiveSet db foiks + dirtyWithAge <- Development.IDE.Graph.Internal.Database.getDirtySet db + let victims = [k | (k, age) <- dirtyWithAge + , age >= maxAge + , not (k `memberKeySet` toKeep)] + unless (null victims) $ do + -- Delete victim keys and remove them from the dirty set + atomically $ do + forM_ victims $ \k -> do + SMap.focus cleanupDirty k (databaseValues db) + -- Remove the victim keys from reverse-dependency sets of remaining keys + let list = SMap.listT (databaseValues db) + ListT.traverse_ (\(k', _) -> + SMap.focus (Focus.adjust (onKeyReverseDeps (\ks -> foldr deleteKeySet ks victims))) k' (databaseValues db) + ) list + garbageCollectHook victims + pure () + return victims + +garbageCollectKeys :: Database -> (Key -> Bool) -> ([Key] -> STM ()) -> IO [Key] +garbageCollectKeys db pred garbageCollectHook = do + -- GC policy: + -- - Select dirty keys whose age >= maxAge and that satisfy the given predicate 'pred'. + -- - For each selected key (a victim), drop its previous result by setting its status to Dirty Nothing + -- and remove that key from every other key's reverse-dependency set. + -- - Finally, run the provided 'garbageCollectHook victims' within the same STM transaction. 
+ let maxAge = 0 -- builds; tune as needed or make configurable upstream + -- on idle but still dirty keys + dirtyWithAge <- Development.IDE.Graph.Internal.Database.getDirtySet db + let victims = [k | (k, age) <- dirtyWithAge, age >= maxAge, pred k] + unless (null victims) $ do + -- Delete victim keys and remove them from the dirty set + atomically $ do + forM_ victims $ \k -> do + SMap.focus cleanupDirty k (databaseValues db) + -- Remove the victim keys from reverse-dependency sets of remaining keys + let list = SMap.listT (databaseValues db) + ListT.traverse_ (\(k', _) -> + SMap.focus (Focus.adjust (onKeyReverseDeps (\ks -> foldr deleteKeySet ks victims))) k' (databaseValues db) + ) list + garbageCollectHook victims + pure () + return victims + + +cleanupDirty :: Monad m => Focus.Focus KeyDetails m () +cleanupDirty = Focus.adjust $ \(KeyDetails status rdeps) -> + let status' + | Dirty _ <- status = Dirty Nothing + | otherwise = status + in KeyDetails status' rdeps + -- | Increment the step and mark dirty. -- Assumes that the database is not running a build incDatabase :: Database -> Maybe [Key] -> IO () @@ -298,6 +364,25 @@ transitiveDirtySet database = flip State.execStateT mempty . traverse_ loop next <- lift $ getReverseDependencies database x traverse_ loop (maybe mempty toListKeySet next) +getDependencies :: Database -> Key -> STM (Maybe KeySet) +getDependencies db k = do + m <- SMap.lookup k (databaseValues db) + pure $ do + KeyDetails st _ <- m + case getDeps st of + UnknownDeps -> Nothing + rd -> Just (getResultDepsDefault mempty rd) + +transitiveSet :: Foldable t => Database -> t Key -> STM KeySet +transitiveSet database = flip State.execStateT mempty . traverse_ loop + where + loop x = do + seen <- State.get + if x `memberKeySet` seen then pure () else do + State.put (insertKeySet x seen) + next <- lift $ getDependencies database x + traverse_ loop (maybe mempty toListKeySet next) + -------------------------------------------------------------------------------- -- Asynchronous computations with cancellation diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs index cdf384f7f6..a99e817621 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs @@ -148,6 +148,12 @@ getResult (Clean re) = Just re getResult (Dirty m_re) = m_re getResult (Running _ _ _ m_re) = m_re -- watch out: this returns the previous result +getDeps :: Status -> ResultDeps +getDeps (Clean re) = resultDeps re +getDeps (Dirty (Just re)) = resultDeps re +getDeps (Dirty Nothing) = UnknownDeps +getDeps (Running _ _ re _) = resultDeps re + waitRunning :: Status -> IO () waitRunning Running{..} = runningWait waitRunning _ = return () diff --git a/hls-graph/test/ActionSpec.hs b/hls-graph/test/ActionSpec.hs index 97ab5555ac..3a0b8d6829 100644 --- a/hls-graph/test/ActionSpec.hs +++ b/hls-graph/test/ActionSpec.hs @@ -48,14 +48,17 @@ spec = do let parent = newKey CountRule -- instruct to RunDependenciesChanged then CountRule should be recomputed -- result should be changed 0, build 1 - _res1 <- shakeRunDatabaseForKeys (Just [child]) db [apply1 CountRule] -- count = 2 + _res1 <- shakeRunDatabaseForKeys (Just [child]) db [apply1 CountRule] $ \_ -> return () + -- count = 2 -- since child changed = parent build -- instruct to RunDependenciesSame then CountRule should not be recomputed -- result should be changed 0, build 1 - _res3 <- shakeRunDatabaseForKeys (Just [parent]) db [apply1 
CountRule] -- count = 2 + _res3 <- shakeRunDatabaseForKeys (Just [parent]) db [apply1 CountRule] $ \_ -> return () + -- count = 2 -- invariant child changed = parent build should remains after RunDependenciesSame -- this used to be a bug, with additional computation, see https://github.com/haskell/haskell-language-server/pull/4238 - _res3 <- shakeRunDatabaseForKeys (Just [parent]) db [apply1 CountRule] -- count = 2 + _res3 <- shakeRunDatabaseForKeys (Just [parent]) db [apply1 CountRule] $ \_ -> return () + -- count = 2 c1 <- readMVar count1 c1 `shouldBe` 2 describe "apply1" $ do diff --git a/hls-test-utils/src/Development/IDE/Test.hs b/hls-test-utils/src/Development/IDE/Test.hs index a1bd2dec0e..70390ad118 100644 --- a/hls-test-utils/src/Development/IDE/Test.hs +++ b/hls-test-utils/src/Development/IDE/Test.hs @@ -25,7 +25,6 @@ module Development.IDE.Test , flushMessages , waitForAction , getInterfaceFilesDir - , garbageCollectDirtyKeys , getFilesOfInterest , waitForTypecheck , waitForBuildQueue @@ -218,8 +217,8 @@ waitForAction key TextDocumentIdentifier{_uri} = getInterfaceFilesDir :: TextDocumentIdentifier -> Session FilePath getInterfaceFilesDir TextDocumentIdentifier{_uri} = callTestPlugin (GetInterfaceFilesDir _uri) -garbageCollectDirtyKeys :: CheckParents -> Int -> Session [String] -garbageCollectDirtyKeys parents age = callTestPlugin (GarbageCollectDirtyKeys parents age) +-- garbageCollectDirtyKeys :: CheckParents -> Int -> Session [String] +-- garbageCollectDirtyKeys parents age = callTestPlugin (GarbageCollectDirtyKeys parents age) getStoredKeys :: Session [Text] getStoredKeys = callTestPlugin GetStoredKeys From 26aa9be0cf5602cf4e1fc90bde5505733e33c652 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 1 Sep 2025 23:14:04 +0800 Subject: [PATCH 73/87] hls-graph: simplify AIO; scoped cancellation; fewer threads; safe cleanup - Replace ad-hoc AIO with structured concurrency (TVar + async registry); builder returns results directly; remove lazy splitIO/unsafePerformIO - Reduce redundant thread creation; use per-key builderOne and STM retry instead of spawning; fewer races - Add AsyncParentKill (ThreadId, Step) and treat it as async; use cancelWith from Shake to scope cancellation to the current session - Mask critical sections and do uninterruptible cleanup on exception (mark Dirty) to avoid stuck Running and hangs - Adjust types/wiring (Running payload, runAIO takes Step, compute/refresh signatures); minor tweaks in ghcide Shake/Plugin.Test Fixes #4718 --- ghcide/src/Development/IDE/Core/Shake.hs | 26 +-- ghcide/src/Development/IDE/Plugin/Test.hs | 4 +- .../src/Development/IDE/Graph/Database.hs | 9 +- .../Development/IDE/Graph/Internal/Action.hs | 2 + .../IDE/Graph/Internal/Database.hs | 220 +++++++----------- .../Development/IDE/Graph/Internal/Types.hs | 43 ++-- 6 files changed, 129 insertions(+), 175 deletions(-) diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs index 53c9939ab2..99db32ae94 100644 --- a/ghcide/src/Development/IDE/Core/Shake.hs +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -94,7 +94,6 @@ import Data.Aeson (Result (Success), toJSON) import qualified Data.Aeson.Types as A import qualified Data.ByteString.Char8 as BS -import qualified Data.ByteString.Char8 as BS8 import Data.Coerce (coerce) import Data.Default import Data.Dynamic @@ -107,8 +106,7 @@ import Data.Hashable import qualified Data.HashMap.Strict as HMap import Data.HashSet (HashSet) import qualified Data.HashSet as HSet -import Data.List.Extra (foldl', 
partition, - takeEnd) +import Data.List.Extra (partition, takeEnd) import qualified Data.Map.Strict as Map import Data.Maybe import qualified Data.SortedList as SL @@ -130,15 +128,11 @@ import Development.IDE.Types.Options as Options import qualified Language.LSP.Protocol.Message as LSP import qualified Language.LSP.Server as LSP -import Control.Concurrent (threadDelay) -import Control.Concurrent.Extra (readVar) -import Control.Monad (forever) import Data.HashMap.Strict (HashMap) import qualified Data.HashMap.Strict as HashMap import Data.Int (Int64) import Data.IORef.Extra (atomicModifyIORef'_, readIORef) -import Data.Text.Encoding (encodeUtf8) import Development.IDE.Core.Tracing import Development.IDE.Core.WorkerThread import Development.IDE.GHC.Compat (NameCache, @@ -149,18 +143,16 @@ import Development.IDE.GHC.Orphans () import Development.IDE.Graph hiding (ShakeValue, action) import qualified Development.IDE.Graph as Shake -import Development.IDE.Graph.Database (ShakeDatabase, +import Development.IDE.Graph.Database (AsyncParentKill (..), + ShakeDatabase, shakeGetBuildStep, shakeGetDatabaseKeys, shakeNewDatabase, shakeProfileDatabase, shakeRunDatabaseForKeys) -import Development.IDE.Graph.Internal.Database (garbageCollectKeys, - garbageCollectKeys1) -import Development.IDE.Graph.Internal.Types (Database) +import Development.IDE.Graph.Internal.Database (garbageCollectKeys1) import Development.IDE.Graph.Rule import Development.IDE.Types.Action -import Development.IDE.Types.Action (isActionQueueEmpty) import Development.IDE.Types.Diagnostics import Development.IDE.Types.Exports hiding (exportsMapSize) import qualified Development.IDE.Types.Exports as ExportsMap @@ -169,7 +161,6 @@ import Development.IDE.Types.Location import Development.IDE.Types.Monitoring (Monitoring (..)) import Development.IDE.Types.Shake import qualified Focus -import GHC.Base (undefined) import GHC.Fingerprint import GHC.Stack (HasCallStack) import GHC.TypeLits (KnownSymbol) @@ -179,7 +170,6 @@ import qualified Ide.Logger as Logger import Ide.Plugin.Config import qualified Ide.PluginUtils as HLS import Ide.Types -import Ide.Types (CheckParents (CheckOnSave)) import qualified Language.LSP.Protocol.Lens as L import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types @@ -948,7 +938,8 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason newDirtyKe $ \db -> do GarbageCollectVar var <- getIdeGlobalExtras extras -- checkParentsOpt <- optCheckParents =<< getIdeOptionsIO extras - isGarbageCollectionScheduled <- readVar var + -- isGarbageCollectionScheduled <- readVar var + let isGarbageCollectionScheduled = False when (isActionQueueEmpty && isGarbageCollectionScheduled) $ do -- reset garbage collection flag liftIO $ writeVar var False @@ -989,8 +980,11 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason newDirtyKe -- Cancelling is required to flush the Shake database when either -- the filesystem or the Ghc configuration have changed + step <- shakeGetBuildStep shakeDb let cancelShakeSession :: IO () - cancelShakeSession = cancel workThread + cancelShakeSession = do + tid <- myThreadId + cancelWith workThread $ AsyncParentKill tid step pure (ShakeSession{..}) diff --git a/ghcide/src/Development/IDE/Plugin/Test.hs b/ghcide/src/Development/IDE/Plugin/Test.hs index a16f1a9781..be03cd5a8a 100644 --- a/ghcide/src/Development/IDE/Plugin/Test.hs +++ b/ghcide/src/Development/IDE/Plugin/Test.hs @@ -39,7 +39,7 @@ import Development.IDE.Graph.Database (ShakeDatabase, 
shakeGetBuildStep, shakeGetCleanKeys) import Development.IDE.Graph.Internal.Types (Result (resultBuilt, resultChanged, resultVisited), - Step (Step)) + Step) import qualified Development.IDE.Graph.Internal.Types as Graph import Development.IDE.Types.Action import Development.IDE.Types.HscEnvEq (HscEnvEq (hscEnv)) @@ -140,7 +140,7 @@ getDatabaseKeys :: (Graph.Result -> Step) getDatabaseKeys field db = do keys <- shakeGetCleanKeys db step <- shakeGetBuildStep db - return [ k | (k, res) <- keys, field res == Step step] + return [ k | (k, res) <- keys, field res == step] parseAction :: CI String -> NormalizedFilePath -> Action (Either Text Bool) parseAction "typecheck" fp = Right . isJust <$> use TypeCheck fp diff --git a/hls-graph/src/Development/IDE/Graph/Database.hs b/hls-graph/src/Development/IDE/Graph/Database.hs index 902fe031d5..65d946b547 100644 --- a/hls-graph/src/Development/IDE/Graph/Database.hs +++ b/hls-graph/src/Development/IDE/Graph/Database.hs @@ -1,4 +1,5 @@ module Development.IDE.Graph.Database( + AsyncParentKill(..), ShakeDatabase, ShakeValue, shakeNewDatabase, @@ -8,8 +9,8 @@ module Development.IDE.Graph.Database( shakeGetBuildStep, shakeGetDatabaseKeys, shakeGetDirtySet, - shakeGetCleanKeys - ,shakeGetBuildEdges) where + shakeGetCleanKeys, + shakeGetBuildEdges) where import Control.Concurrent.STM.Stats (readTVarIO) import Data.Dynamic import Data.Maybe @@ -42,9 +43,9 @@ shakeGetDirtySet (ShakeDatabase _ _ db) = Development.IDE.Graph.Internal.Database.getDirtySet db -- | Returns the build number -shakeGetBuildStep :: ShakeDatabase -> IO Int +shakeGetBuildStep :: ShakeDatabase -> IO Step shakeGetBuildStep (ShakeDatabase _ _ db) = do - Step s <- readTVarIO $ databaseStep db + s <- readTVarIO $ databaseStep db return s -- Only valid if we never pull on the results, which we don't diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Action.hs b/hls-graph/src/Development/IDE/Graph/Internal/Action.hs index 6d47d9b511..30ef078ffe 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Action.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Action.hs @@ -81,8 +81,10 @@ actionFork act k = do isAsyncException :: SomeException -> Bool isAsyncException e + | Just (_ :: SomeAsyncException) <- fromException e = True | Just (_ :: AsyncCancelled) <- fromException e = True | Just (_ :: AsyncException) <- fromException e = True + | Just (_ :: AsyncParentKill) <- fromException e = True | Just (_ :: ExitCode) <- fromException e = True | otherwise = False diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs index 78bf196572..0d27b73a11 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs @@ -8,16 +8,17 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} -module Development.IDE.Graph.Internal.Database (compute, newDatabase, incDatabase, build, getDirtySet, getKeysAndVisitAge, incDatabaseRaw, garbageCollectKeys, garbageCollectKeys1) where +module Development.IDE.Graph.Internal.Database (compute, newDatabase, incDatabase, build, getDirtySet, getKeysAndVisitAge, AsyncParentKill(..), garbageCollectKeys, garbageCollectKeys1) where import Prelude hiding (unzip) import Control.Concurrent.Async import Control.Concurrent.Extra -import Control.Concurrent.STM.Stats (STM, atomically, +import Control.Concurrent.STM.Stats (STM, TVar, atomically, atomicallyNamed, modifyTVar', newTVarIO, - readTVarIO) + readTVar, readTVarIO, + 
retry) import Control.Exception import Control.Monad import Control.Monad.IO.Class (MonadIO (liftIO)) @@ -25,7 +26,6 @@ import Control.Monad.Trans.Class (lift) import Control.Monad.Trans.Reader import qualified Control.Monad.Trans.State.Strict as State import Data.Dynamic -import Data.Either import Data.Foldable (for_, traverse_) import Data.IORef.Extra import Data.Maybe @@ -40,8 +40,9 @@ import qualified Focus import qualified ListT import qualified StmContainers.Map as SMap import qualified StmContainers.Set as SSet -import System.IO.Unsafe import System.Time.Extra (duration, sleep) +import UnliftIO (MonadUnliftIO (withRunInIO)) +import qualified UnliftIO.Exception as UE #if MIN_VERSION_base(4,19,0) import Data.Functor (unzip) @@ -57,11 +58,6 @@ newDatabase databaseExtra databaseRules = do databaseDirtyKeys <- atomically SSet.new pure Database{..} -incDatabaseRaw :: Database -> IO () -incDatabaseRaw db = - atomicallyNamed "incDatabaseRaw" $ do - modifyTVar' (databaseStep db) $ \(Step i) -> Step $ i + 1 - garbageCollectKeys1 :: Database -> (Key -> Bool) -> ([Key] -> STM ()) -> IO [Key] garbageCollectKeys1 db pred garbageCollectHook = do -- GC policy: @@ -145,7 +141,7 @@ incDatabase db Nothing = do updateDirty :: Monad m => Focus.Focus KeyDetails m () updateDirty = Focus.adjust $ \(KeyDetails status rdeps) -> let status' - | Running _ _ _ x <- status = Dirty x + | Running _ x <- status = Dirty x | Clean x <- status = Dirty (Just x) | otherwise = status in KeyDetails status' rdeps @@ -155,11 +151,8 @@ build => Database -> Stack -> f key -> IO (f Key, f value) -- build _ st k | traceShow ("build", st, k) False = undefined build db stack keys = do - built <- runAIO $ do - built <- builder db stack (fmap newKey keys) - case built of - Left clean -> return clean - Right dirty -> liftIO dirty + step <- readTVarIO $ databaseStep db + !built <- runAIO step $ builder db stack (fmap newKey keys) let (ids, vs) = unzip built pure (ids, fmap (asV . resultValue) vs) where @@ -169,44 +162,41 @@ build db stack keys = do -- | Build a list of keys and return their results. -- If none of the keys are dirty, we can return the results immediately. -- Otherwise, a blocking computation is returned *which must be evaluated asynchronously* to avoid deadlock. -builder - :: Traversable f => Database -> Stack -> f Key -> AIO (Either (f (Key, Result)) (IO (f (Key, Result)))) +builder :: (Traversable f) => Database -> Stack -> f Key -> AIO (f (Key, Result)) -- builder _ st kk | traceShow ("builder", st,kk) False = undefined -builder db@Database{..} stack keys = withRunInIO $ \(RunInIO run) -> do - -- Things that I need to force before my results are ready - toForce <- liftIO $ newTVarIO [] - current <- liftIO $ readTVarIO databaseStep - results <- liftIO $ for keys $ \id -> - -- Updating the status of all the dependencies atomically is not necessary. - -- Therefore, run one transaction per dep. 
to avoid contention - atomicallyNamed "builder" $ do - -- Spawn the id if needed - status <- SMap.lookup id databaseValues - val <- case viewDirty current $ maybe (Dirty Nothing) keyStatus status of - Clean r -> pure r - Running _ force val _ - | memberStack id stack -> throw $ StackException stack - | otherwise -> do - modifyTVar' toForce (Wait force :) - pure val - Dirty s -> do - let act = run (refresh db stack id s) - (force, val) = splitIO (join act) - SMap.focus (updateStatus $ Running current force val s) id databaseValues - modifyTVar' toForce (Spawn force:) - pure val - - pure (id, val) - - toForceList <- liftIO $ readTVarIO toForce - let waitAll = run $ waitConcurrently_ toForceList - case toForceList of - [] -> return $ Left results - _ -> return $ Right $ do - waitAll - pure results - - +builder db stack keys = do + keyWaits <- for keys $ \k -> builderOne db stack k + !res <- for keyWaits $ \(k, waitR) -> do + !v<- liftIO waitR + return (k, v) + return res + +builderOne :: Database -> Stack -> Key -> AIO (Key, IO Result) +builderOne db@Database {..} stack id = UE.mask $ \restore -> do + current <- liftIO $ readTVarIO databaseStep + (k, registerWaitResult) <- liftIO $ atomicallyNamed "builder" $ do + -- Spawn the id if needed + status <- SMap.lookup id databaseValues + val <- + let refreshRsult s = do + let act = + restore $ asyncWithCleanUp $ + refresh db stack id s + `UE.onException` (UE.uninterruptibleMask_ $ liftIO (atomicallyNamed "builder - onException" (SMap.focus updateDirty id databaseValues))) + + SMap.focus (updateStatus $ Running current s) id databaseValues + return act + in case viewDirty current $ maybe (Dirty Nothing) keyStatus status of + Dirty mbr -> refreshRsult mbr + Running step _mbr + | step /= current -> error $ "Inconsistent database state: key " ++ show id ++ " is marked Running at step " ++ show step ++ " but current step is " ++ show current + | memberStack id stack -> throw $ StackException stack + | otherwise -> retry + Clean r -> pure . pure . 
pure $ r + -- force here might contains async exceptions from previous runs + pure (id, val) + waitR <- registerWaitResult + return (k, waitR) -- | isDirty -- only dirty when it's build time is older than the changed time of one of its dependencies isDirty :: Foldable t => Result -> t (a, Result) -> Bool @@ -222,41 +212,37 @@ isDirty me = any (\(_,dep) -> resultBuilt me < resultChanged dep) refreshDeps :: KeySet -> Database -> Stack -> Key -> Result -> [KeySet] -> AIO Result refreshDeps visited db stack key result = \case -- no more deps to refresh - [] -> liftIO $ compute db stack key RunDependenciesSame (Just result) + [] -> compute' db stack key RunDependenciesSame (Just result) (dep:deps) -> do let newVisited = dep <> visited res <- builder db stack (toListKeySet (dep `differenceKeySet` visited)) - case res of - Left res -> if isDirty result res + if isDirty result res -- restart the computation if any of the deps are dirty - then liftIO $ compute db stack key RunDependenciesChanged (Just result) + then compute' db stack key RunDependenciesChanged (Just result) -- else kick the rest of the deps else refreshDeps newVisited db stack key result deps - Right iores -> do - res <- liftIO iores - if isDirty result res - then liftIO $ compute db stack key RunDependenciesChanged (Just result) - else refreshDeps newVisited db stack key result deps - --- | Refresh a key: -refresh :: Database -> Stack -> Key -> Maybe Result -> AIO (IO Result) + + +-- refresh :: Database -> Stack -> Key -> Maybe Result -> IO Result -- refresh _ st k _ | traceShow ("refresh", st, k) False = undefined +refresh :: Database -> Stack -> Key -> Maybe Result -> AIO Result refresh db stack key result = case (addStack key stack, result) of (Left e, _) -> throw e - (Right stack, Just me@Result{resultDeps = ResultDeps deps}) -> asyncWithCleanUp $ refreshDeps mempty db stack key me (reverse deps) - (Right stack, _) -> - asyncWithCleanUp $ liftIO $ compute db stack key RunDependenciesChanged result + (Right stack, Just me@Result{resultDeps = ResultDeps deps}) -> refreshDeps mempty db stack key me (reverse deps) + (Right stack, _) -> compute' db stack key RunDependenciesChanged result +compute' :: Database -> Stack -> Key -> RunMode -> Maybe Result -> AIO Result +compute' db stack key mode result = liftIO $ compute db stack key mode result -- | Compute a key. compute :: Database -> Stack -> Key -> RunMode -> Maybe Result -> IO Result -- compute _ st k _ _ | traceShow ("compute", st, k) False = undefined compute db@Database{..} stack key mode result = do let act = runRule databaseRules key (fmap resultData result) mode - deps <- newIORef UnknownDeps + deps <- liftIO $ newIORef UnknownDeps (execution, RunResult{..}) <- - duration $ runReaderT (fromAction act) $ SAction db deps stack - curStep <- readTVarIO databaseStep - deps <- readIORef deps + liftIO $ duration $ runReaderT (fromAction act) $ SAction db deps stack + curStep <- liftIO $ readTVarIO databaseStep + deps <- liftIO $ readIORef deps let lastChanged = maybe curStep resultChanged result let lastBuild = maybe curStep resultBuilt result -- changed time is always older than or equal to build time @@ -279,12 +265,12 @@ compute db@Database{..} stack key mode result = do -- If an async exception strikes before the deps have been recorded, -- we won't be able to accurately propagate dirtiness for this key -- on the next build. 
- void $ + liftIO $ void $ updateReverseDeps key db (getResultDepsDefault mempty previousDeps) deps _ -> pure () - atomicallyNamed "compute and run hook" $ do + liftIO $ atomicallyNamed "compute and run hook" $ do runHook SMap.focus (updateStatus $ Clean res) key databaseValues SSet.delete key databaseDirtyKeys @@ -315,18 +301,6 @@ getKeysAndVisitAge db = do getAge Result{resultVisited = Step s} = curr - s return keysWithVisitAge -------------------------------------------------------------------------------- --- Lazy IO trick - -data Box a = Box {fromBox :: a} - --- | Split an IO computation into an unsafe lazy value and a forcing computation -splitIO :: IO a -> (IO (), a) -splitIO act = do - let act2 = Box <$> act - let res = unsafePerformIO act2 - (void $ evaluate res, fromBox res) - --------------------------------------------------------------------------------- -- Reverse dependencies -- | Update the reverse dependencies of an Id @@ -388,14 +362,29 @@ transitiveSet database = flip State.execStateT mempty . traverse_ loop -- | A simple monad to implement cancellation on top of 'Async', -- generalizing 'withAsync' to monadic scopes. -newtype AIO a = AIO { unAIO :: ReaderT (IORef [Async ()]) IO a } +newtype AIO a = AIO { unAIO :: ReaderT (TVar [Async ()]) IO a } deriving newtype (Applicative, Functor, Monad, MonadIO) +data AsyncParentKill = AsyncParentKill ThreadId Step + deriving (Show, Eq) + +instance Exception AsyncParentKill where + toException = asyncExceptionToException + fromException = asyncExceptionFromException + -- | Run the monadic computation, cancelling all the spawned asyncs if an exception arises -runAIO :: AIO a -> IO a -runAIO (AIO act) = do - asyncs <- newIORef [] - runReaderT act asyncs `onException` cleanupAsync asyncs +runAIO :: Step -> AIO a -> IO a +runAIO s (AIO act) = do + asyncsRef <- newTVarIO [] + -- Log the exact exception (including async exceptions) before cleanup, + -- then rethrow to preserve previous semantics. + runReaderT act asyncsRef `onException` do + asyncs <- atomically $ do + r <- readTVar asyncsRef + modifyTVar' asyncsRef $ const [] + return r + tid <- myThreadId + cleanupAsync asyncs tid s -- | Like 'async' but with built-in cancellation. -- Returns an IO action to wait on the result. @@ -406,7 +395,7 @@ asyncWithCleanUp act = do -- mask to make sure we keep track of the spawned async liftIO $ uninterruptibleMask $ \restore -> do a <- async $ restore io - atomicModifyIORef'_ st (void a :) + atomically $ modifyTVar' st (void a :) return $ wait a unliftAIO :: AIO a -> AIO (IO a) @@ -414,19 +403,17 @@ unliftAIO act = do st <- AIO ask return $ runReaderT (unAIO act) st -newtype RunInIO = RunInIO (forall a. 
AIO a -> IO a) +instance MonadUnliftIO AIO where + withRunInIO k = do + st <- AIO ask + liftIO $ k (\aio -> runReaderT (unAIO aio) st) -withRunInIO :: (RunInIO -> AIO b) -> AIO b -withRunInIO k = do - st <- AIO ask - k $ RunInIO (\aio -> runReaderT (unAIO aio) st) - -cleanupAsync :: IORef [Async a] -> IO () +cleanupAsync :: [Async a] -> ThreadId -> Step -> IO () -- mask to make sure we interrupt all the asyncs -cleanupAsync ref = uninterruptibleMask $ \unmask -> do - asyncs <- atomicModifyIORef' ref ([],) +cleanupAsync asyncs tid step = uninterruptibleMask $ \unmask -> do -- interrupt all the asyncs without waiting - mapM_ (\a -> throwTo (asyncThreadId a) AsyncCancelled) asyncs + -- mapM_ (\a -> throwTo (asyncThreadId a) AsyncCancelled) asyncs + mapM_ (\a -> throwTo (asyncThreadId a) $ AsyncParentKill tid step) asyncs -- Wait until all the asyncs are done -- But if it takes more than 10 seconds, log to stderr unless (null asyncs) $ do @@ -435,32 +422,3 @@ cleanupAsync ref = uninterruptibleMask $ \unmask -> do traceM "cleanupAsync: waiting for asyncs to finish" withAsync warnIfTakingTooLong $ \_ -> mapM_ waitCatch asyncs - -data Wait - = Wait {justWait :: !(IO ())} - | Spawn {justWait :: !(IO ())} - -fmapWait :: (IO () -> IO ()) -> Wait -> Wait -fmapWait f (Wait io) = Wait (f io) -fmapWait f (Spawn io) = Spawn (f io) - -waitOrSpawn :: Wait -> IO (Either (IO ()) (Async ())) -waitOrSpawn (Wait io) = pure $ Left io -waitOrSpawn (Spawn io) = Right <$> async io - -waitConcurrently_ :: [Wait] -> AIO () -waitConcurrently_ [] = pure () -waitConcurrently_ [one] = liftIO $ justWait one -waitConcurrently_ many = do - ref <- AIO ask - -- spawn the async computations. - -- mask to make sure we keep track of all the asyncs. - (asyncs, syncs) <- liftIO $ uninterruptibleMask $ \unmask -> do - waits <- liftIO $ traverse (waitOrSpawn . fmapWait unmask) many - let (syncs, asyncs) = partitionEithers waits - liftIO $ atomicModifyIORef'_ ref (asyncs ++) - return (asyncs, syncs) - -- work on the sync computations - liftIO $ sequence_ syncs - -- wait for the async computations before returning - liftIO $ traverse_ wait asyncs diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs index a99e817621..03e1f0b657 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs @@ -6,7 +6,6 @@ module Development.IDE.Graph.Internal.Types where import Control.Concurrent.STM (STM) -import Control.Monad ((>=>)) import Control.Monad.Catch import Control.Monad.IO.Class import Control.Monad.Trans.Reader @@ -81,8 +80,8 @@ getDatabase :: Action Database getDatabase = Action $ asks actionDatabase -- | waitForDatabaseRunningKeysAction waits for all keys in the database to finish running. -waitForDatabaseRunningKeysAction :: Action () -waitForDatabaseRunningKeysAction = getDatabase >>= liftIO . waitForDatabaseRunningKeys +-- waitForDatabaseRunningKeysAction :: Action () +-- waitForDatabaseRunningKeysAction = getDatabase >>= liftIO . waitForDatabaseRunningKeys --------------------------------------------------------------------- -- DATABASE @@ -90,7 +89,7 @@ waitForDatabaseRunningKeysAction = getDatabase >>= liftIO . 
waitForDatabaseRunni data ShakeDatabase = ShakeDatabase !Int [Action ()] Database newtype Step = Step Int - deriving newtype (Eq,Ord,Hashable,Show) + deriving newtype (Eq,Ord,Hashable,Show,Num,Enum,Real,Integral) --------------------------------------------------------------------- -- Keys @@ -119,8 +118,8 @@ data Database = Database { -- by the client, it would be removed once the target key is marked as clean. } -waitForDatabaseRunningKeys :: Database -> IO () -waitForDatabaseRunningKeys = getDatabaseValues >=> mapM_ (waitRunning . snd) +-- waitForDatabaseRunningKeys :: Database -> IO () +-- waitForDatabaseRunningKeys = getDatabaseValues >=> mapM_ (waitRunning . snd) getDatabaseValues :: Database -> IO [(Key, Status)] getDatabaseValues = atomically @@ -133,30 +132,30 @@ data Status = Clean !Result | Dirty (Maybe Result) | Running { - runningStep :: !Step, - runningWait :: !(IO ()), - runningResult :: Result, -- LAZY - runningPrev :: !(Maybe Result) + runningStep :: !Step, + -- runningWait :: !(IO ()), + -- runningResult :: Result, -- LAZY + runningPrev :: !(Maybe Result) } viewDirty :: Step -> Status -> Status -viewDirty currentStep (Running s _ _ re) | currentStep /= s = Dirty re +viewDirty currentStep (Running s re) | currentStep /= s = Dirty re viewDirty _ other = other getResult :: Status -> Maybe Result -getResult (Clean re) = Just re -getResult (Dirty m_re) = m_re -getResult (Running _ _ _ m_re) = m_re -- watch out: this returns the previous result +getResult (Clean re) = Just re +getResult (Dirty m_re) = m_re +getResult (Running _ m_re) = m_re -- watch out: this returns the previous result getDeps :: Status -> ResultDeps -getDeps (Clean re) = resultDeps re -getDeps (Dirty (Just re)) = resultDeps re -getDeps (Dirty Nothing) = UnknownDeps -getDeps (Running _ _ re _) = resultDeps re - -waitRunning :: Status -> IO () -waitRunning Running{..} = runningWait -waitRunning _ = return () +getDeps (Clean re) = resultDeps re +getDeps (Dirty (Just re)) = resultDeps re +getDeps (Dirty Nothing) = UnknownDeps +getDeps (Running _ re) = maybe mempty resultDeps re + +-- waitRunning :: Status -> IO () +-- waitRunning Running{..} = runningWait +-- waitRunning _ = return () data Result = Result { resultValue :: !Value, From a21981034d3d8ffbb7b04f0792b0f86859038c95 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sat, 6 Sep 2025 07:09:08 +0800 Subject: [PATCH 74/87] update hls-graph runtime --- ghcide/ghcide.cabal | 1 - .../session-loader/Development/IDE/Session.hs | 18 +- ghcide/src/Development/IDE/Core/Compile.hs | 4 +- ghcide/src/Development/IDE/Core/FileStore.hs | 8 +- ghcide/src/Development/IDE/Core/OfInterest.hs | 13 +- .../Development/IDE/Core/PositionMapping.hs | 1 - ghcide/src/Development/IDE/Core/Rules.hs | 2 +- ghcide/src/Development/IDE/Core/Shake.hs | 337 ++++++++---------- .../src/Development/IDE/Core/WorkerThread.hs | 59 --- ghcide/src/Development/IDE/GHC/Compat.hs | 1 - .../src/Development/IDE/LSP/LanguageServer.hs | 206 +++++++---- ghcide/src/Development/IDE/Plugin/Test.hs | 4 +- ghcide/src/Development/IDE/Types/HscEnvEq.hs | 1 - hls-graph/hls-graph.cabal | 6 + hls-graph/src/Development/IDE/Graph.hs | 3 +- .../src/Development/IDE/Graph/Database.hs | 34 +- .../Development/IDE/Graph/Internal/Action.hs | 52 ++- .../IDE/Graph/Internal/Database.hs | 269 ++++---------- .../Development/IDE/Graph/Internal/Types.hs | 100 ++++-- hls-graph/src/Development/IDE/WorkerThread.hs | 164 +++++++++ hls-graph/test/ActionSpec.hs | 71 ++-- hls-graph/test/DatabaseSpec.hs | 36 +- 22 files changed, 733 
insertions(+), 657 deletions(-) delete mode 100644 ghcide/src/Development/IDE/Core/WorkerThread.hs create mode 100644 hls-graph/src/Development/IDE/WorkerThread.hs diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index d1c6d907a3..359b742771 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -142,7 +142,6 @@ library Development.IDE.Core.Shake Development.IDE.Core.Tracing Development.IDE.Core.UseStale - Development.IDE.Core.WorkerThread Development.IDE.GHC.Compat Development.IDE.GHC.Compat.Core Development.IDE.GHC.Compat.CmdLine diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 678acb13f4..2d43724f3f 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -81,7 +81,6 @@ import Data.Void import Control.Concurrent.STM.Stats (atomically, modifyTVar', readTVar, writeTVar) -import Control.Concurrent.STM.TQueue import Control.Monad.Trans.Cont (ContT (ContT, runContT)) import Data.Foldable (for_) import Data.HashMap.Strict (HashMap) @@ -89,7 +88,6 @@ import Data.HashSet (HashSet) import qualified Data.HashSet as Set import Database.SQLite.Simple import Development.IDE.Core.Tracing (withTrace) -import Development.IDE.Core.WorkerThread (withWorkerQueue) import Development.IDE.Session.Dependency import Development.IDE.Session.Diagnostics (renderCradleError) import Development.IDE.Session.Ghc hiding (Log) @@ -108,6 +106,7 @@ import qualified Control.Monad.STM as STM import Control.Monad.Trans.Reader import qualified Development.IDE.Session.Ghc as Ghc import qualified Development.IDE.Session.OrderedSet as S +import Development.IDE.WorkerThread import qualified Focus import qualified StmContainers.Map as STM @@ -133,10 +132,13 @@ data Log | LogLookupSessionCache !FilePath | LogTime !String | LogSessionGhc Ghc.Log + | LogSessionWorkerThread LogWorkerThread deriving instance Show Log + instance Pretty Log where pretty = \case + LogSessionWorkerThread lt -> pretty lt LogTime s -> "Time:" <+> pretty s LogLookupSessionCache path -> "Looking up session cache for" <+> pretty path LogGetOptionsLoop fp -> "Loop: getOptions for" <+> pretty fp @@ -362,7 +364,7 @@ runWithDb recorder fp = ContT $ \k -> do _ <- withWriteDbRetryable deleteMissingRealFiles _ <- withWriteDbRetryable garbageCollectTypeNames - runContT (withWorkerQueue (writer withWriteDbRetryable)) $ \chan -> + runContT (withWorkerQueue (logWith (cmapWithPrio LogSessionWorkerThread recorder) Debug) "hiedb thread" (writer withWriteDbRetryable)) $ \chan -> withHieDb fp (\readDb -> k (WithHieDbShield $ makeWithHieDbRetryable recorder rng readDb, chan)) where writer withHieDbRetryable l = do @@ -589,7 +591,7 @@ newSessionState = do -- components mapping to the same hie.yaml file are mapped to the same -- HscEnv which is updated as new components are discovered. 
-loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions -> FilePath -> TQueue (IO ()) -> IO (Action IdeGhcSession) +loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions -> FilePath -> TaskQueue (IO ()) -> IO (Action IdeGhcSession) loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] @@ -617,7 +619,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- see Note [Serializing runs in separate thread] -- Start the getOptionsLoop if the queue is empty - liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ do + liftIO $ atomically $ Extra.whenM (isEmptyTaskQueue que) $ do let newSessionLoadingOptions = SessionLoadingOptions { findCradle = cradleLoc , .. @@ -636,7 +638,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do , sessionLoadingOptions = newSessionLoadingOptions } - writeTQueue que (runReaderT (getOptionsLoop recorder sessionShake sessionState knownTargetsVar) sessionEnv) + writeTaskQueue que (runReaderT (getOptionsLoop recorder sessionShake sessionState knownTargetsVar) sessionEnv) -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. @@ -935,7 +937,7 @@ loadCradleWithNotifications recorder sessionState loadCradle hieYaml cfp = do when (isNothing hieYaml) $ logWith recorder Warning $ LogCradleNotFound lfpLog cradle <- liftIO $ loadCradle hieYaml rootDir - when (isTesting) $ mRunLspT lspEnv $ + when isTesting $ mRunLspT lspEnv $ sendNotification (SMethod_CustomMethod (Proxy @"ghcide/cradle/loaded")) (toJSON cfp) -- Display a user friendly progress message here: They probably don't know what a cradle is @@ -1034,7 +1036,7 @@ data PackageSetupException { compileTime :: !Version , runTime :: !Version } - deriving (Eq, Show, Typeable) + deriving (Eq, Show) instance Exception PackageSetupException diff --git a/ghcide/src/Development/IDE/Core/Compile.hs b/ghcide/src/Development/IDE/Core/Compile.hs index 1e77a4c2f3..8065e56325 100644 --- a/ghcide/src/Development/IDE/Core/Compile.hs +++ b/ghcide/src/Development/IDE/Core/Compile.hs @@ -114,7 +114,6 @@ import qualified GHC.Runtime.Loader as Loader import GHC.Tc.Gen.Splice import GHC.Types.Error import GHC.Types.ForeignStubs -import GHC.Types.HpcInfo import GHC.Types.TypeEnv -- See Note [Guidelines For Using CPP In GHCIDE Import Statements] @@ -151,6 +150,7 @@ import GHC.Iface.Ext.Types (NameEntityInfo) #if MIN_VERSION_ghc(9,12,0) import Development.IDE.Import.FindImports +import Development.IDE.WorkerThread (writeTaskQueue) #endif --Simple constants to make sure the source is consistently named @@ -883,7 +883,7 @@ indexHieFile se mod_summary srcPath !hash hf = do -- hiedb doesn't use the Haskell src, so we clear it to avoid unnecessarily keeping it around let !hf' = hf{hie_hs_src = mempty} modifyTVar' indexPending $ HashMap.insert srcPath hash - writeTQueue indexQueue $ \withHieDb -> do + writeTaskQueue indexQueue $ \withHieDb -> do -- We are now in the worker thread -- Check if a newer index of this file has been scheduled, and if so skip this one newerScheduled <- atomically $ do diff --git a/ghcide/src/Development/IDE/Core/FileStore.hs b/ghcide/src/Development/IDE/Core/FileStore.hs index 2cde86713f..07b104d26d 100644 --- a/ghcide/src/Development/IDE/Core/FileStore.hs +++ b/ghcide/src/Development/IDE/Core/FileStore.hs @@ -25,7 
+25,6 @@ module Development.IDE.Core.FileStore( ) where import Control.Concurrent.STM.Stats (STM, atomically) -import Control.Concurrent.STM.TQueue (writeTQueue) import Control.Exception import Control.Lens ((^.)) import Control.Monad.Extra @@ -52,6 +51,7 @@ import Development.IDE.Types.Diagnostics import Development.IDE.Types.Location import Development.IDE.Types.Options import Development.IDE.Types.Shake (toKey) +import Development.IDE.WorkerThread (writeTaskQueue) import HieDb.Create (deleteMissingRealFiles) import Ide.Logger (Pretty (pretty), Priority (Info), @@ -82,7 +82,6 @@ data Log = LogCouldNotIdentifyReverseDeps !NormalizedFilePath | LogTypeCheckingReverseDeps !NormalizedFilePath !(Maybe [NormalizedFilePath]) | LogShake Shake.Log - | LogGetModificationTime !NormalizedFilePath deriving Show instance Pretty Log where @@ -95,8 +94,6 @@ instance Pretty Log where <> ":" <+> pretty (fmap (fmap show) reverseDepPaths) LogShake msg -> pretty msg - LogGetModificationTime path -> - "Getting modification time for" <+> viaShow path addWatchedFileRule :: Recorder (WithPriority Log) -> (NormalizedFilePath -> Action Bool) -> Rules () addWatchedFileRule recorder isWatched = defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \AddWatchedFile f -> do @@ -113,7 +110,6 @@ addWatchedFileRule recorder isWatched = defineNoDiagnostics (cmapWithPrio LogSha getModificationTimeRule :: Recorder (WithPriority Log) -> Rules () getModificationTimeRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \(GetModificationTime_ missingFileDiags) file -> do - logWith recorder Info $ LogGetModificationTime file getModificationTimeImpl missingFileDiags file getModificationTimeImpl @@ -306,7 +302,7 @@ typecheckParentsAction recorder nfp = do setSomethingModified :: VFSModified -> IdeState -> String -> IO [Key] -> IO () setSomethingModified vfs state reason actionBetweenSession = do -- Update database to remove any files that might have been renamed/deleted - atomically $ writeTQueue (indexQueue $ hiedbWriter $ shakeExtras state) (\withHieDb -> withHieDb deleteMissingRealFiles) + atomically $ writeTaskQueue (indexQueue $ hiedbWriter $ shakeExtras state) (\withHieDb -> withHieDb deleteMissingRealFiles) void $ restartShakeSession (shakeExtras state) vfs reason [] actionBetweenSession registerFileWatches :: [String] -> LSP.LspT Config IO Bool diff --git a/ghcide/src/Development/IDE/Core/OfInterest.hs b/ghcide/src/Development/IDE/Core/OfInterest.hs index abdc224898..19e0f40e24 100644 --- a/ghcide/src/Development/IDE/Core/OfInterest.hs +++ b/ghcide/src/Development/IDE/Core/OfInterest.hs @@ -57,6 +57,10 @@ instance Pretty Log where pretty = \case LogShake msg -> pretty msg +newtype OfInterestVar = OfInterestVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) + +instance IsIdeGlobal OfInterestVar + -- | The rule that initialises the files of interest state. 
ofInterestRules :: Recorder (WithPriority Log) -> Rules () ofInterestRules recorder = do @@ -75,6 +79,9 @@ ofInterestRules recorder = do summarize (IsFOI (Modified False)) = BS.singleton 2 summarize (IsFOI (Modified True)) = BS.singleton 3 +------------------------------------------------------------ +newtype GarbageCollectVar = GarbageCollectVar (Var Bool) +instance IsIdeGlobal GarbageCollectVar ------------------------------------------------------------ -- Exposed API @@ -147,6 +154,10 @@ kick = do liftIO $ progressUpdate progress ProgressCompleted + GarbageCollectVar var <- getIdeGlobalAction + garbageCollectionScheduled <- liftIO $ readVar var + when garbageCollectionScheduled $ do + void garbageCollectDirtyKeys + liftIO $ writeVar var False signal (Proxy @"kick/done") - diff --git a/ghcide/src/Development/IDE/Core/PositionMapping.hs b/ghcide/src/Development/IDE/Core/PositionMapping.hs index de02f5b1f7..41f9ca50e0 100644 --- a/ghcide/src/Development/IDE/Core/PositionMapping.hs +++ b/ghcide/src/Development/IDE/Core/PositionMapping.hs @@ -28,7 +28,6 @@ import Control.Lens ((^.)) import Control.Monad import Data.Algorithm.Diff import Data.Bifunctor -import Data.List import qualified Data.Text as T import qualified Data.Vector.Unboxed as V import qualified Language.LSP.Protocol.Lens as L diff --git a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs index c123c9d4a8..8273570aca 100644 --- a/ghcide/src/Development/IDE/Core/Rules.hs +++ b/ghcide/src/Development/IDE/Core/Rules.hs @@ -722,7 +722,7 @@ loadGhcSession recorder ghcSessionDepsConfig = do itExists <- getFileExists nfp when itExists $ void $ do use_ GetPhysicalModificationTime nfp - logWith recorder Logger.Info $ LogDependencies file deps + logWith recorder Logger.Debug $ LogDependencies file deps mapM_ addDependency deps let cutoffHash = LBS.toStrict $ B.encode (hash (snd val)) diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs index 99db32ae94..94bf9f733c 100644 --- a/ghcide/src/Development/IDE/Core/Shake.hs +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -24,10 +24,8 @@ module Development.IDE.Core.Shake( IdeState, shakeSessionInit, shakeExtras, shakeDb, rootDir, ShakeExtras(..), getShakeExtras, getShakeExtrasRules, - KnownTargets(..), Target(..), GarbageCollectVar(..), - OfInterestVar(..), - toKnownFiles, unionKnownTargets, mkKnownTargets, - IdeRule, IdeResult, + KnownTargets(..), Target(..), toKnownFiles, unionKnownTargets, mkKnownTargets, + IdeRule, IdeResult, RestartQueue, GetModificationTime(GetModificationTime, GetModificationTime_, missingFileDiagnostics), shakeOpen, shakeShut, shakeEnqueue, @@ -73,6 +71,8 @@ module Development.IDE.Core.Shake( HieDb, HieDbWriter(..), addPersistentRule, + garbageCollectDirtyKeys, + garbageCollectDirtyKeysOlderThan, Log(..), VFSModified(..), getClientConfigAction, ThreadQueue(..), @@ -94,6 +94,7 @@ import Data.Aeson (Result (Success), toJSON) import qualified Data.Aeson.Types as A import qualified Data.ByteString.Char8 as BS +import qualified Data.ByteString.Char8 as BS8 import Data.Coerce (coerce) import Data.Default import Data.Dynamic @@ -128,13 +129,10 @@ import Development.IDE.Types.Options as Options import qualified Language.LSP.Protocol.Message as LSP import qualified Language.LSP.Server as LSP -import Data.HashMap.Strict (HashMap) -import qualified Data.HashMap.Strict as HashMap -import Data.Int (Int64) -import Data.IORef.Extra (atomicModifyIORef'_, - readIORef) +import Control.Exception (Exception 
(fromException)) +import Data.Either (isLeft, isRight, + lefts) import Development.IDE.Core.Tracing -import Development.IDE.Core.WorkerThread import Development.IDE.GHC.Compat (NameCache, NameCacheUpdater, initNameCache, @@ -143,14 +141,17 @@ import Development.IDE.GHC.Orphans () import Development.IDE.Graph hiding (ShakeValue, action) import qualified Development.IDE.Graph as Shake -import Development.IDE.Graph.Database (AsyncParentKill (..), - ShakeDatabase, +import Development.IDE.Graph.Database (ShakeDatabase, shakeGetBuildStep, shakeGetDatabaseKeys, shakeNewDatabase, shakeProfileDatabase, - shakeRunDatabaseForKeys) -import Development.IDE.Graph.Internal.Database (garbageCollectKeys1) + shakeRunDatabaseForKeys, + shakeShutDatabase) +import Development.IDE.Graph.Internal.Action (runActionInDb) +import Development.IDE.Graph.Internal.Database (AsyncParentKill (AsyncParentKill)) +import Development.IDE.Graph.Internal.Types (Step (..), + getShakeStep) import Development.IDE.Graph.Rule import Development.IDE.Types.Action import Development.IDE.Types.Diagnostics @@ -160,6 +161,7 @@ import Development.IDE.Types.KnownTargets import Development.IDE.Types.Location import Development.IDE.Types.Monitoring (Monitoring (..)) import Development.IDE.Types.Shake +import Development.IDE.WorkerThread import qualified Focus import GHC.Fingerprint import GHC.Stack (HasCallStack) @@ -182,33 +184,33 @@ import qualified StmContainers.Map as STM import System.FilePath hiding (makeRelative) import System.IO.Unsafe (unsafePerformIO) import System.Time.Extra -import UnliftIO (MonadUnliftIO (withRunInIO), - newIORef) +import UnliftIO (MonadUnliftIO (withRunInIO)) +import qualified UnliftIO.Exception as UE + data Log = LogCreateHieDbExportsMapStart | LogCreateHieDbExportsMapFinish !Int - | LogBuildSessionRestart !String ![DelayedActionInternal] ![Key] !Seconds !(Maybe FilePath) + | LogBuildSessionRestart !String ![DelayedActionInternal] !KeySet !Seconds !(Maybe FilePath) | LogBuildSessionRestartTakingTooLong !Seconds | LogDelayedAction !(DelayedAction ()) !Seconds - | LogBuildSessionFinish !(Maybe SomeException) + | LogBuildSessionFinish !Step !(Either SomeException [Either SomeException ()]) | LogDiagsDiffButNoLspEnv ![FileDiagnostic] | LogDefineEarlyCutoffRuleNoDiagHasDiag !FileDiagnostic | LogDefineEarlyCutoffRuleCustomNewnessHasDiag !FileDiagnostic | LogCancelledAction !T.Text | LogSessionInitialised | LogLookupPersistentKey !T.Text - | LogShakeGarbageCollection !T.Text ![Key] !Seconds + | LogShakeGarbageCollection !T.Text !Int !Seconds -- * OfInterest Log messages | LogSetFilesOfInterest ![(NormalizedFilePath, FileOfInterestStatus)] - | LogMonitering !T.Text !Int64 + | LogShakeText !T.Text deriving Show instance Pretty Log where pretty = \case - LogMonitering name value -> - "Monitoring:" <+> pretty name <+> "value:" <+> pretty value + LogShakeText msg -> pretty msg LogCreateHieDbExportsMapStart -> "Initializing exports map from hiedb" LogCreateHieDbExportsMapFinish exportsMapSize -> @@ -217,7 +219,7 @@ instance Pretty Log where vcat [ "Restarting build session due to" <+> pretty reason , "Action Queue:" <+> pretty (map actionName actionQueue) - , "Keys:" <+> pretty (map show keyBackLog) + , "Keys:" <+> pretty (map show $ toListKeySet keyBackLog) , "Aborting previous build session took" <+> pretty (showDuration abortDuration) <+> pretty shakeProfilePath ] LogBuildSessionRestartTakingTooLong seconds -> "Build restart is taking too long (" <> pretty seconds <> " seconds)" @@ -225,10 +227,18 @@ instance Pretty Log 
where hsep [ "Finished:" <+> pretty (actionName delayedAct) , "Took:" <+> pretty (showDuration seconds) ] - LogBuildSessionFinish e -> + LogBuildSessionFinish step e -> vcat [ "Finished build session" - , pretty (fmap displayException e) ] + , "Step:" <+> pretty (show step) + , "Result:" <+> case e of + Left ex -> "Exception:" <+> pretty (show ex) + Right rs -> + if all isRight rs then + "Success" + else + "Exceptions in actions:" <+> pretty (fmap displayException $ lefts rs) + ] LogDiagsDiffButNoLspEnv fileDiagnostics -> "updateFileDiagnostics published different from new diagnostics - file diagnostics:" <+> pretty (showDiagnosticsColored fileDiagnostics) @@ -243,10 +253,8 @@ instance Pretty Log where LogSessionInitialised -> "Shake session initialized" LogLookupPersistentKey key -> "LOOKUP PERSISTENT FOR:" <+> pretty key - LogShakeGarbageCollection label victims duration -> - "ShakeGarbageCollect" <+> pretty (showDuration duration) <+> ", reson" <+> pretty label - <+> "removed" <+> pretty (length victims) <+> "keys" - <> hang 2 (pretty (map show victims)) + LogShakeGarbageCollection label number duration -> + pretty label <+> "of" <+> pretty number <+> "keys (took " <+> pretty (showDuration duration) <> ")" LogSetFilesOfInterest ofInterest -> "Set files of interst to" <> Pretty.line <> indent 4 (pretty $ fmap (first fromNormalizedFilePath) ofInterest) @@ -265,12 +273,15 @@ data HieDbWriter -- | Actions to queue up on the index worker thread -- The inner `(HieDb -> IO ()) -> IO ()` wraps `HieDb -> IO ()` -- with (currently) retry functionality -type IndexQueue = TQueue (((HieDb -> IO ()) -> IO ()) -> IO ()) +type IndexQueue = TaskQueue (((HieDb -> IO ()) -> IO ()) -> IO ()) +type RestartQueue = TaskQueue (IO ()) +type LoaderQueue = TaskQueue (IO ()) + data ThreadQueue = ThreadQueue { tIndexQueue :: IndexQueue - , tRestartQueue :: TQueue (IO ()) - , tLoaderQueue :: TQueue (IO ()) + , tRestartQueue :: RestartQueue + , tLoaderQueue :: LoaderQueue } -- Note [Semantic Tokens Cache Location] @@ -339,9 +350,11 @@ data ShakeExtras = ShakeExtras -- We don't need a STM.Map because we never update individual keys ourselves. , defaultConfig :: Config -- ^ Default HLS config, only relevant if the client does not provide any Config - , restartQueue :: TQueue (IO ()) + , dirtyKeys :: TVar KeySet + -- ^ Set of dirty rule keys since the last Shake run + , restartQueue :: RestartQueue -- ^ Queue of restart actions to be run. - , loaderQueue :: TQueue (IO ()) + , loaderQueue :: LoaderQueue -- ^ Queue of loader actions to be run. } @@ -398,14 +411,14 @@ addPersistentRule k getVal = do void $ liftIO $ atomically $ modifyTVar' persistentKeys $ insertKeyMap (newKey k) (fmap (fmap (first3 toDyn)) . getVal) class Typeable a => IsIdeGlobal a where -newtype OfInterestVar = OfInterestVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) -instance IsIdeGlobal OfInterestVar +-- data VirtualFileEntry = Open VirtualFile | Closed ClosedVirtualFile -- | Read a virtual file from the current snapshot getVirtualFile :: NormalizedFilePath -> Action (Maybe VirtualFile) getVirtualFile nf = do vfs <- fmap _vfsMap . liftIO . readTVarIO . vfsVar =<< getShakeExtras - pure $! Map.lookup (filePathToUri' nf) vfs -- Don't leak a reference to the entire map + let file = Map.lookup (filePathToUri' nf) vfs + pure $! 
file -- Don't leak a reference to the entire map -- Take a snapshot of the current LSP VFS vfsSnapshot :: Maybe (LSP.LanguageContextEnv a) -> IO VFS @@ -670,7 +683,7 @@ shakeOpen :: Recorder (WithPriority Log) shakeOpen recorder lspEnv defaultConfig idePlugins debouncer shakeProfileDir (IdeReportProgress reportProgress) ideTesting - withHieDb threadQueue opts argMonitoring rules rootDir = mdo + withHieDb threadQueue opts monitoring rules rootDir = mdo -- see Note [Serializing runs in separate thread] let indexQueue = tIndexQueue threadQueue restartQueue = tRestartQueue threadQueue @@ -712,11 +725,13 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer actionQueue <- newQueue let clientCapabilities = maybe def LSP.resClientCapabilities lspEnv + dirtyKeys <- newTVarIO mempty -- Take one VFS snapshot at the start vfsVar <- newTVarIO =<< vfsSnapshot lspEnv pure ShakeExtras{shakeRecorder = recorder, ..} shakeDb <- shakeNewDatabase + restartQueue opts { shakeExtra = newShakeExtra shakeExtras } rules shakeSession <- newEmptyMVar @@ -729,18 +744,13 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer checkParents <- optCheckParents - logMonitoring <- newLogMonitoring recorder - let monitoring = logMonitoring <> argMonitoring -- monitoring let readValuesCounter = fromIntegral . countRelevantKeys checkParents <$> getStateKeys shakeExtras - readDirtyKeys = return 0 + readDirtyKeys = fromIntegral . countRelevantKeys checkParents . toListKeySet <$> readTVarIO(dirtyKeys shakeExtras) readIndexPending = fromIntegral . HMap.size <$> readTVarIO (indexPending $ hiedbWriter shakeExtras) readExportsMap = fromIntegral . ExportsMap.exportsMapSize <$> readTVarIO (exportsMap shakeExtras) readDatabaseCount = fromIntegral . countRelevantKeys checkParents . map fst <$> shakeGetDatabaseKeys shakeDb readDatabaseStep = fromIntegral <$> shakeGetBuildStep shakeDb - readNumActionsRunning = fromIntegral . length <$> atomically (peekInProgress $ actionQueue shakeExtras) - readIsActionQueueEmpty = let boolToInt b = if b then 1 else 0 - in boolToInt <$> atomically (isActionQueueEmpty $ actionQueue shakeExtras) registerGauge monitoring "ghcide.values_count" readValuesCounter registerGauge monitoring "ghcide.dirty_keys_count" readDirtyKeys @@ -748,30 +758,12 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer registerGauge monitoring "ghcide.exports_map_count" readExportsMap registerGauge monitoring "ghcide.database_count" readDatabaseCount registerCounter monitoring "ghcide.num_builds" readDatabaseStep - registerCounter monitoring "ghcide.num_actions_runnning" readNumActionsRunning - registerCounter monitoring "ghcide.isActionQueueEmpty" readIsActionQueueEmpty stopMonitoring <- start monitoring let ideState = IdeState{..} return ideState -newLogMonitoring :: MonadIO m => Recorder (WithPriority Log) -> m Monitoring -newLogMonitoring logger = do - actions <- newIORef [] - let registerCounter name readA = do - let update = do - val <- readA - logWith logger Info $ LogMonitering name (fromIntegral val) - atomicModifyIORef'_ actions (update :) - registerGauge = registerCounter - let start = do - a <- regularly 10 $ sequence_ =<< readIORef actions - return (cancel a) - return Monitoring{..} - where - regularly :: Seconds -> IO () -> IO (Async ()) - regularly delay act = async $ forever (act >> sleep delay) getStateKeys :: ShakeExtras -> IO [Key] getStateKeys = (fmap.fmap) fst . atomically . ListT.toList . STM.listT . 
stateValues @@ -782,7 +774,7 @@ shakeSessionInit recorder IdeState{..} = do -- Take a snapshot of the VFS - it should be empty as we've received no notifications -- till now, but it can't hurt to be in sync with the `lsp` library. vfs <- vfsSnapshot (lspEnv shakeExtras) - initSession <- newSession recorder shakeExtras (VFSModified vfs) shakeDb [] "shakeSessionInit" Nothing + initSession <- newSession recorder shakeExtras (VFSModified vfs) shakeDb [] "shakeSessionInit" putMVar shakeSession initSession logWith recorder Debug LogSessionInitialised @@ -831,22 +823,21 @@ shakeRestart recorder IdeState{..} vfs reason acts ioActionBetweenShakeSession = shakeSession (\runner -> do (stopTime,()) <- duration $ logErrorAfter 10 $ cancelShakeSession runner - newDirtyKeys <- ioActionBetweenShakeSession + queue <- atomicallyNamed "actionQueue - peek" $ peekInProgress $ actionQueue shakeExtras + keys <- ioActionBetweenShakeSession -- it is every important to update the dirty keys after we enter the critical section -- see Note [Housekeeping rule cache and dirty key outside of hls-graph] - -- atomically $ modifyTVar' (dirtyKeys shakeExtras) $ \x -> foldl' (flip insertKeySet) x keys + atomically $ modifyTVar' (dirtyKeys shakeExtras) $ \x -> foldl' (flip insertKeySet) x keys res <- shakeDatabaseProfile shakeDb - queue <- atomicallyNamed "actionQueue - peek" $ peekInProgress $ actionQueue shakeExtras - + backlog <- readTVarIO $ dirtyKeys shakeExtras -- this log is required by tests - logWith recorder Debug $ LogBuildSessionRestart reason queue newDirtyKeys stopTime res - return newDirtyKeys + logWith recorder Debug $ LogBuildSessionRestart reason queue backlog stopTime res ) -- It is crucial to be masked here, otherwise we can get killed -- between spawning the new thread and updating shakeSession. -- See https://github.com/haskell/ghcide/issues/79 - (\newDirtyKeys -> do - (,()) <$> newSession recorder shakeExtras vfs shakeDb acts reason (Just newDirtyKeys)) + (\() -> do + (,()) <$> newSession recorder shakeExtras vfs shakeDb acts reason) where logErrorAfter :: Seconds -> IO () -> IO () logErrorAfter seconds action = flip withAsync (const action) $ do @@ -864,7 +855,7 @@ shakeEnqueue ShakeExtras{actionQueue, shakeRecorder} act = do atomicallyNamed "actionQueue - push" $ pushQueue dai actionQueue let wait' barrier = waitBarrier barrier `catches` - [ Handler (\BlockedIndefinitelyOnMVar -> + [ Handler(\BlockedIndefinitelyOnMVar -> fail $ "internal bug: forever blocked on MVar for " <> actionName act) , Handler (\e@AsyncCancelled -> do @@ -877,28 +868,17 @@ shakeEnqueue ShakeExtras{actionQueue, shakeRecorder} act = do data VFSModified = VFSUnmodified | VFSModified !VFS ------------------------------------------------------------- -newtype GarbageCollectVar = GarbageCollectVar (Var Bool) -instance IsIdeGlobal GarbageCollectVar - - -getFilesOfInterest :: ShakeExtras -> IO [NormalizedFilePath] -getFilesOfInterest state = do - OfInterestVar var <- getIdeGlobalExtras state - mm <- readVar var - return $ map fst $ HashMap.toList mm - -- | Set up a new 'ShakeSession' with a set of initial actions -- Will crash if there is an existing 'ShakeSession' running. 
--- newSession --- :: Recorder (WithPriority Log) --- -> ShakeExtras --- -> VFSModified --- -> ShakeDatabase --- -> [DelayedActionInternal] --- -> String --- -> IO ShakeSession -newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason newDirtyKeys = do +newSession + :: Recorder (WithPriority Log) + -> ShakeExtras + -> VFSModified + -> ShakeDatabase + -> [DelayedActionInternal] + -> String + -> IO ShakeSession +newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason = do -- Take a new VFS snapshot case vfsMod of @@ -906,18 +886,28 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason newDirtyKe VFSModified vfs -> atomically $ writeTVar vfsVar vfs IdeOptions{optRunSubset} <- getIdeOptionsIO extras - isActionQueueEmpty <- fmap ((&&) (null acts)) $ atomicallyNamed "actionQueue - is empty" $ isActionQueueEmpty actionQueue - reenqueued <- atomicallyNamed "actionQueue - peek" $ peekInProgress actionQueue + step <- getShakeStep shakeDb + allPendingKeys <- + if optRunSubset + then Just <$> readTVarIO dirtyKeys + else return Nothing let -- A daemon-like action used to inject additional work -- Runs actions from the work queue sequentially - pumpActionThread otSpan = do + logResult :: Show a => String -> [Either SomeException a] -> IO () + logResult label results = for_ results $ \case + Left e | Just (AsyncParentKill _ _) <- fromException e -> logWith recorder Debug $ LogShakeText (T.pack $ label ++ " failed: " ++ show e) + Left e -> logWith recorder Error $ LogShakeText (T.pack $ label ++ " failed: " ++ show e) + Right r -> logWith recorder Debug $ LogShakeText (T.pack $ label ++ " finished: " ++ show r) + pumpActionThread = do d <- liftIO $ atomicallyNamed "action queue - pop" $ popQueue actionQueue - actionFork (run otSpan d) $ \_ -> pumpActionThread otSpan + r <- runActionInDb [run d] + liftIO $ logResult "pumpActionThread" r + pumpActionThread -- TODO figure out how to thread the otSpan into defineEarlyCutoff - run _otSpan d = do + run d = do start <- liftIO offsetTime getAction d liftIO $ atomicallyNamed "actionQueue - done" $ doneQueue d actionQueue @@ -925,66 +915,34 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason newDirtyKe logWith recorder (actionPriority d) $ LogDelayedAction d runTime -- The inferred type signature doesn't work in ghc >= 9.0.1 - workRun :: (forall b. IO b -> IO b) -> IO (IO ()) + workRun :: (forall b. 
IO b -> IO b) -> IO () workRun restore = withSpan "Shake session" $ \otSpan -> do setTag otSpan "reason" (fromString reason) setTag otSpan "queue" (fromString $ unlines $ map actionName reenqueued) - -- whenJust allPendingKeys $ \kk -> setTag otSpan "keys" (BS8.pack $ unlines $ map show $ toListKeySet kk) - let keysActs = pumpActionThread otSpan : map (run otSpan) (reenqueued ++ acts) + whenJust allPendingKeys $ \kk -> setTag otSpan "keys" (BS8.pack $ unlines $ map show $ toListKeySet kk) + let keysActs = pumpActionThread : map run (reenqueued ++ acts) res <- try @SomeException $ - restore $ shakeRunDatabaseForKeys (if optRunSubset then newDirtyKeys else Nothing) shakeDb keysActs - -- We only do garbage collection if the action queue is empty - -- and if it has been scheduled - $ \db -> do - GarbageCollectVar var <- getIdeGlobalExtras extras - -- checkParentsOpt <- optCheckParents =<< getIdeOptionsIO extras - -- isGarbageCollectionScheduled <- readVar var - let isGarbageCollectionScheduled = False - when (isActionQueueEmpty && isGarbageCollectionScheduled) $ do - -- reset garbage collection flag - liftIO $ writeVar var False - start <- offsetTime - -- todo do not remove keys that have FOI as its reverse deps. - -- top level - foiFiles <- getFilesOfInterest extras - -- We find a list of keys that are FOI and their dependencies, - -- and mark them as "needed". Then we delete all dirty keys not marked as needed. - let isFoiRules :: Key -> Bool - isFoiRules k = case fromKeyType k of - Just (_, path) | path `elem` foiFiles || path == "" -> True - _ -> False - - -- victims <- garbageCollectKeys db (isRelevantKey checkParentsOpt) - victims <- garbageCollectKeys1 db isFoiRules - -- also remove the keys from the stateValues map - (mapM_ $ \k -> STM.focus Focus.delete k stateValues) - when (coerce ideTesting) $ liftIO $ mRunLspT lspEnv $ - LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/GC")) - (toJSON $ mapMaybe (fmap (show . Q) . fromKeyType) victims) - runTime <- liftIO start - logWith recorder Info $ LogShakeGarbageCollection (T.pack reason) victims runTime - return $ do - let exception = - case res of - Left e -> Just e - _ -> Nothing - logWith recorder Debug $ LogBuildSessionFinish exception + restore $ shakeRunDatabaseForKeys (toListKeySet <$> allPendingKeys) shakeDb keysActs + logWith recorder Debug $ LogBuildSessionFinish step $ res -- Do the work in a background thread - workThread <- asyncWithUnmask workRun + parentTid <- myThreadId + workThread <- asyncWithUnmask $ \x -> do + childThreadId <- myThreadId + logWith recorder Info $ LogShakeText ("Starting shake thread: " <> T.pack (show childThreadId) <> " (parent: " <> T.pack (show parentTid) <> ")") + workRun x - -- run the wrap up in a separate thread since it contains interruptible - -- commands (and we are not using uninterruptible mask) - -- TODO: can possibly swallow exceptions? 
- _ <- async $ join $ wait workThread -- Cancelling is required to flush the Shake database when either -- the filesystem or the Ghc configuration have changed - step <- shakeGetBuildStep shakeDb + let cancelShakeSession :: IO () cancelShakeSession = do + logWith recorder Info $ LogShakeText ("Starting shake cancellation: " <> " (" <> T.pack (show reason) <> ")") tid <- myThreadId cancelWith workThread $ AsyncParentKill tid step + shakeShutDatabase shakeDb + pure (ShakeSession{..}) @@ -1020,60 +978,50 @@ getHiddenDiagnostics IdeState{shakeExtras = ShakeExtras{hiddenDiagnostics}} = do -- * position mapping store -- * indexing queue -- * exports map --- garbageCollectDirtyKeys :: Action [Key] --- garbageCollectDirtyKeys = do --- IdeOptions{optCheckParents} <- getIdeOptions --- checkParents <- liftIO optCheckParents --- garbageCollectDirtyKeysOlderThan TVar KeySet 0 checkParents - --- garbageCollectDirtyKeysOlderThan :: TVar KeySet -> Int -> CheckParents -> Action [Key] --- garbageCollectDirtyKeysOlderThan dirtyKeys maxAge checkParents = otTracedGarbageCollection "dirty GC" $ do --- dirtySet <- getDirtySet --- garbageCollectKeys dirtyKeys "dirty GC" maxAge checkParents dirtySet - --- garbageCollectKeys :: Database -> String -> Int -> CheckParents -> [(Key, Int)] -> Action [Key] --- garbageCollectKeys = undefined --- garbageCollectKeys dirtyKeys label maxAge checkParents agedKeys = do --- start <- liftIO offsetTime --- ShakeExtras{stateValues, lspEnv, shakeRecorder, ideTesting} <- getShakeExtras --- (n::Int, garbage) <- liftIO $ --- foldM (removeDirtyKey dirtyKeys stateValues) (0,[]) agedKeys --- -- let n = 0 --- -- let garbage = [] --- t <- liftIO start --- when (n>0) $ liftIO $ do --- logWith shakeRecorder Debug $ LogShakeGarbageCollection (T.pack label) n t --- when (coerce ideTesting) $ liftIO $ mRunLspT lspEnv $ --- LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/GC")) --- (toJSON $ mapMaybe (fmap showKey . fromKeyType) garbage) --- return garbage - --- where --- showKey = show . Q --- removeDirtyKey dk values st@(!counter, keys) (k, age) --- | age > maxAge --- , Just (kt,_) <- fromKeyType k --- , not(kt `HSet.member` preservedKeys checkParents) --- = atomicallyNamed "GC" $ do --- gotIt <- STM.focus (Focus.member <* Focus.delete) k values --- when gotIt $ --- modifyTVar' dk (insertKeySet k) --- return $ if gotIt then (counter+1, k:keys) else st --- | otherwise = pure st +garbageCollectDirtyKeys :: Action [Key] +garbageCollectDirtyKeys = do + IdeOptions{optCheckParents} <- getIdeOptions + checkParents <- liftIO optCheckParents + garbageCollectDirtyKeysOlderThan 0 checkParents + +garbageCollectDirtyKeysOlderThan :: Int -> CheckParents -> Action [Key] +garbageCollectDirtyKeysOlderThan maxAge checkParents = otTracedGarbageCollection "dirty GC" $ do + dirtySet <- getDirtySet + garbageCollectKeys "dirty GC" maxAge checkParents dirtySet + +garbageCollectKeys :: String -> Int -> CheckParents -> [(Key, Int)] -> Action [Key] +garbageCollectKeys label maxAge checkParents agedKeys = do + start <- liftIO offsetTime + ShakeExtras{stateValues, dirtyKeys, lspEnv, shakeRecorder, ideTesting} <- getShakeExtras + (n::Int, garbage) <- liftIO $ + foldM (removeDirtyKey dirtyKeys stateValues) (0,[]) agedKeys + t <- liftIO start + when (n>0) $ liftIO $ do + logWith shakeRecorder Debug $ LogShakeGarbageCollection (T.pack label) n t + when (coerce ideTesting) $ liftIO $ mRunLspT lspEnv $ + LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/GC")) + (toJSON $ mapMaybe (fmap showKey . 
fromKeyType) garbage) + return garbage + + where + showKey = show . Q + removeDirtyKey dk values st@(!counter, keys) (k, age) + | age > maxAge + , Just (kt,_) <- fromKeyType k + , not(kt `HSet.member` preservedKeys checkParents) + = atomicallyNamed "GC" $ do + gotIt <- STM.focus (Focus.member <* Focus.delete) k values + when gotIt $ + modifyTVar' dk (insertKeySet k) + return $ if gotIt then (counter+1, k:keys) else st + | otherwise = pure st countRelevantKeys :: CheckParents -> [Key] -> Int countRelevantKeys checkParents = Prelude.length . filter (maybe False (not . (`HSet.member` preservedKeys checkParents) . fst) . fromKeyType) --- A key is relevant if it is not in the preserved set --- i.e. it is a key that can be garbage collected -isRelevantKey :: CheckParents -> Key -> Bool -isRelevantKey p k = maybe False (not . (`HSet.member` preservedKeys p) . fst) (fromKeyType k) - preservedKeys :: CheckParents -> HashSet TypeRep preservedKeys checkParents = HSet.fromList $ - -- always preserved - -- always preserved -- always preserved [ typeOf GetFileExists , typeOf GetModificationTime @@ -1236,7 +1184,6 @@ usesWithStale key files = do traverse (lastValue key) files -- we use separate fingerprint rules to trigger the rebuild of the rule --- fingerKey should depend on the key, so we can use it to trigger a rebuild useWithSeparateFingerprintRule :: (IdeRule k v, IdeRule k1 Fingerprint) => k1 -> k -> NormalizedFilePath -> Action (Maybe v) @@ -1321,7 +1268,7 @@ defineEarlyCutoff' -> (Value v -> Action (Maybe BS.ByteString, IdeResult v)) -> Action (RunResult (A (RuleResult k))) defineEarlyCutoff' doDiagnostics cmp key file mbOld mode action = do - ShakeExtras{stateValues, progress} <- getShakeExtras + ShakeExtras{stateValues, progress, dirtyKeys} <- getShakeExtras options <- getIdeOptions let trans g x = withRunInIO $ \run -> g (run x) (if optSkipProgress options key then id else trans (inProgress progress file)) $ do @@ -1372,6 +1319,7 @@ defineEarlyCutoff' doDiagnostics cmp key file mbOld mode action = do -- this hook needs to be run in the same transaction as the key is marked clean -- see Note [Housekeeping rule cache and dirty key outside of hls-graph] setValues stateValues key file res (Vector.fromList diags) + modifyTVar' dirtyKeys (deleteKeySet $ toKey key file) return res where -- Highly unsafe helper to compute the version of a file @@ -1453,8 +1401,9 @@ updateFileDiagnostics recorder fp ver k ShakeExtras{diagnostics, hiddenDiagnosti newDiags <- liftIO $ atomicallyNamed "diagnostics - update" $ update (addTagUnsafe "shown ") currentShown diagnostics _ <- liftIO $ atomicallyNamed "diagnostics - hidden" $ update (addTagUnsafe "hidden ") currentHidden hiddenDiagnostics let uri' = filePathToUri' fp - let delay = if null newDiags then 0.1 else 0 - registerEvent debouncer delay uri' $ withTrace ("report diagnostics " <> fromString (fromNormalizedFilePath fp)) $ \tag -> do + -- let delay = if null newDiags then 0.1 else 0 + -- registerEvent debouncer delay uri' $ withTrace ("report diagnostics " <> fromString (fromNormalizedFilePath fp)) $ \tag -> do + withTrace ("report diagnostics " <> fromString (fromNormalizedFilePath fp)) $ \tag -> do join $ mask_ $ do lastPublish <- atomicallyNamed "diagnostics - publish" $ STM.focus (Focus.lookupWithDefault [] <* Focus.insert newDiags) uri' publishedDiagnostics let action = when (lastPublish /= newDiags) $ case lspEnv of diff --git a/ghcide/src/Development/IDE/Core/WorkerThread.hs b/ghcide/src/Development/IDE/Core/WorkerThread.hs deleted file mode 100644 
index 6d141c7ef3..0000000000 --- a/ghcide/src/Development/IDE/Core/WorkerThread.hs +++ /dev/null @@ -1,59 +0,0 @@ -{- -Module : Development.IDE.Core.WorkerThread -Author : @soulomoon -SPDX-License-Identifier: Apache-2.0 - -Description : This module provides an API for managing worker threads in the IDE. -see Note [Serializing runs in separate thread] --} -module Development.IDE.Core.WorkerThread - (withWorkerQueue, awaitRunInThread) - where - -import Control.Concurrent.Async (AsyncCancelled (AsyncCancelled), - withAsync) -import Control.Concurrent.STM -import Control.Concurrent.Strict (newBarrier, signalBarrier, - waitBarrier) -import Control.Exception.Safe (Exception (fromException), - SomeException, throwIO, try) -import Control.Monad (forever) -import Control.Monad.Cont (ContT (ContT)) - -{- -Note [Serializing runs in separate thread] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -We often want to take long-running actions using some resource that cannot be shared. -In this instance it is useful to have a queue of jobs to run using the resource. -Like the db writes, session loading in session loader, shake session restarts. - -Originally we used various ways to implement this, but it was hard to maintain and error prone. -Moreover, we can not stop these threads uniformly when we are shutting down the server. --} - --- | 'withWorkerQueue' creates a new 'TQueue', and launches a worker --- thread which polls the queue for requests and runs the given worker --- function on them. -withWorkerQueue :: (t -> IO a) -> ContT () IO (TQueue t) -withWorkerQueue workerAction = ContT $ \mainAction -> do - q <- newTQueueIO - withAsync (writerThread q) $ \_ -> mainAction q - where - writerThread q = - forever $ do - l <- atomically $ readTQueue q - workerAction l - --- | 'awaitRunInThread' queues up an 'IO' action to be run by a worker thread, --- and then blocks until the result is computed. If the action throws an --- non-async exception, it is rethrown in the calling thread. -awaitRunInThread :: TQueue (IO ()) -> IO result -> IO result -awaitRunInThread q act = do - -- Take an action from TQueue, run it and - -- use barrier to wait for the result - barrier <- newBarrier - atomically $ writeTQueue q $ try act >>= signalBarrier barrier - resultOrException <- waitBarrier barrier - case resultOrException of - Left e -> throwIO (e :: SomeException) - Right r -> return r diff --git a/ghcide/src/Development/IDE/GHC/Compat.hs b/ghcide/src/Development/IDE/GHC/Compat.hs index befd22c8de..d4750f1a2e 100644 --- a/ghcide/src/Development/IDE/GHC/Compat.hs +++ b/ghcide/src/Development/IDE/GHC/Compat.hs @@ -115,7 +115,6 @@ module Development.IDE.GHC.Compat( import Control.Applicative ((<|>)) import qualified Data.ByteString as BS import Data.Coerce (coerce) -import Data.List (foldl') import qualified Data.Map as Map import qualified Data.Set as S import Data.String (IsString (fromString)) diff --git a/ghcide/src/Development/IDE/LSP/LanguageServer.hs b/ghcide/src/Development/IDE/LSP/LanguageServer.hs index 918e024a4f..8948d719d1 100644 --- a/ghcide/src/Development/IDE/LSP/LanguageServer.hs +++ b/ghcide/src/Development/IDE/LSP/LanguageServer.hs @@ -12,6 +12,8 @@ module Development.IDE.LSP.LanguageServer , ThreadQueue , runWithWorkerThreads , Setup (..) + , InitializationContext (..) 
+ , untilMVar' ) where import Control.Concurrent.STM @@ -35,32 +37,56 @@ import UnliftIO.Directory import UnliftIO.Exception import qualified Colog.Core as Colog +import Control.Concurrent.Extra (newBarrier, + signalBarrier, + waitBarrier) import Control.Monad.IO.Unlift (MonadUnliftIO) import Control.Monad.Trans.Cont (evalContT) import Development.IDE.Core.IdeConfiguration +import Development.IDE.Core.Service (shutdown) import Development.IDE.Core.Shake hiding (Log) import Development.IDE.Core.Tracing -import Development.IDE.Core.WorkerThread (withWorkerQueue) import qualified Development.IDE.Session as Session import Development.IDE.Types.Shake (WithHieDb, WithHieDbShield (..)) +import Development.IDE.WorkerThread import Ide.Logger import Language.LSP.Server (LanguageContextEnv, LspServerLog, type (<~>)) +import System.Time.Extra (Seconds, sleep) +import System.Timeout (timeout) data Log = LogRegisteringIdeConfig !IdeConfiguration | LogReactorThreadException !SomeException | LogReactorMessageActionException !SomeException - | LogReactorThreadStopped + | LogReactorThreadStopped Int | LogCancelledRequest !SomeLspId | LogSession Session.Log | LogLspServer LspServerLog - | LogServerShutdownMessage + | LogReactorShutdownRequested Bool + | LogShutDownTimeout Int + | LogServerExitWith (Either () Int) + | LogReactorShutdownConfirmed !T.Text + | LogInitializeIdeStateTookTooLong Seconds deriving Show instance Pretty Log where pretty = \case + LogInitializeIdeStateTookTooLong seconds -> + "Building the initial session took more than" <+> pretty seconds <+> "seconds" + LogReactorShutdownRequested b -> + "Requested reactor shutdown; stop signal posted: " <+> pretty b + LogReactorShutdownConfirmed msg -> + "Reactor shutdown confirmed: " <+> pretty msg + LogServerExitWith (Right 0) -> + "Server exited successfully" + LogServerExitWith (Right code) -> + "Server exited with failure code" <+> pretty code + LogServerExitWith (Left _) -> + "Server forcefully exited due to exception in reactor thread" + LogShutDownTimeout seconds -> + "Shutdown timeout, the server will exit now after waiting for" <+> pretty seconds <+> "seconds" LogRegisteringIdeConfig ideConfig -> -- This log is also used to identify if HLS starts successfully in vscode-haskell, -- don't forget to update the corresponding test in vscode-haskell if the text in @@ -74,13 +100,38 @@ instance Pretty Log where vcat [ "ReactorMessageActionException" , pretty $ displayException e ] - LogReactorThreadStopped -> - "Reactor thread stopped" + LogReactorThreadStopped i -> + "Reactor thread stopped" <+> pretty i LogCancelledRequest requestId -> "Cancelled request" <+> viaShow requestId LogSession msg -> pretty msg LogLspServer msg -> pretty msg - LogServerShutdownMessage -> "Received shutdown message" + +-- | Context for initializing the LSP language server. +-- This record encapsulates all the configuration and callback functions +-- needed to set up and run the language server initialization process. 
+data InitializationContext config = InitializationContext
+ { ctxRecorder :: Recorder (WithPriority Log)
+ -- ^ Logger for recording server events and diagnostics
+ , ctxDefaultRoot :: FilePath
+ -- ^ Default root directory for the workspace, see Note [Root Directory]
+ , ctxGetHieDbLoc :: FilePath -> IO FilePath
+ -- ^ Function to determine the HIE database location for a given root path
+ , ctxGetIdeState :: LSP.LanguageContextEnv config -> FilePath -> WithHieDb -> ThreadQueue -> IO IdeState
+ -- ^ Function to create and initialize the IDE state with the given environment
+ , ctxUntilReactorStopSignal :: IO () -> IO ()
+ -- ^ Lifetime control: runs the given action until reactor shutdown is signaled
+ , ctxconfirmReactorShutdown :: T.Text -> IO ()
+ -- ^ Callback to log/confirm reactor shutdown with a reason
+ , ctxForceShutdown :: IO ()
+ -- ^ Action to forcefully exit the server when an exception occurs
+ , ctxClearReqId :: SomeLspId -> IO ()
+ -- ^ Function to clear/cancel a request by its ID
+ , ctxWaitForCancel :: SomeLspId -> IO ()
+ -- ^ Function to wait for a request cancellation by its ID
+ , ctxClientMsgChan :: Chan ReactorMessage
+ -- ^ Channel for communicating with the reactor message loop
+ }
 data Setup config m a
 = MkSetup
@@ -136,8 +187,8 @@ runLanguageServer recorder options inH outH defaultConfig parseConfig onConfigCh
 outH
 serverDefinition
- untilMVar clientMsgVar $
- runServer `finally` sequence_ onExit
+ untilMVar' clientMsgVar runServer `finally` sequence_ onExit
+ >>= logWith recorder Info . LogServerExitWith
 setupLSP ::
 forall config.
@@ -155,8 +206,21 @@ setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar
 -- An MVar to control the lifetime of the reactor loop.
 -- The loop will be stopped and resources freed when it's full
- reactorLifetime <- newEmptyMVar
- let stopReactorLoop = void $ tryPutMVar reactorLifetime ()
+ reactorStopSignal <- newEmptyMVar
+ reactorConfirmBarrier <- newBarrier
+ let
+ untilReactorStopSignal = untilMVar reactorStopSignal
+ confirmReactorShutdown reason = do
+ logWith recorder Debug $ LogReactorShutdownConfirmed reason
+ signalBarrier reactorConfirmBarrier ()
+ requestReactorShutdown = do
+ k <- tryPutMVar reactorStopSignal ()
+ logWith recorder Info $ LogReactorShutdownRequested k
+ let timeOutSeconds = 2
+ timeout (timeOutSeconds * 1_000_000) (waitBarrier reactorConfirmBarrier) >>= \case
+ Just () -> pure ()
+ -- If we don't get confirmation within the timeout, we log a warning and shut down anyway.
+ Nothing -> logWith recorder Warning $ LogShutDownTimeout timeOutSeconds
 -- Forcefully exit
 let exit = void $ tryPutMVar clientMsgVar ()
@@ -185,49 +249,63 @@ setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar
 let staticHandlers = mconcat
 [ userHandlers
 , cancelHandler cancelRequest
- , exitHandler exit
- , shutdownHandler recorder stopReactorLoop
+ , shutdownHandler recorder requestReactorShutdown
 ]
 -- Cancel requests are special since they need to be handled
 -- out of order to be useful. Existing handlers are run afterwards. 
- let doInitialize = handleInit recorder defaultRoot getHieDbLoc getIdeState reactorLifetime exit clearReqId waitForCancel clientMsgChan + let initParams = InitializationContext + { ctxRecorder = recorder + , ctxDefaultRoot = defaultRoot + , ctxGetHieDbLoc = getHieDbLoc + , ctxGetIdeState = getIdeState + , ctxUntilReactorStopSignal = untilReactorStopSignal + , ctxconfirmReactorShutdown = confirmReactorShutdown + , ctxForceShutdown = exit + , ctxClearReqId = clearReqId + , ctxWaitForCancel = waitForCancel + , ctxClientMsgChan = clientMsgChan + } + + let doInitialize = handleInit initParams let interpretHandler (env, st) = LSP.Iso (LSP.runLspT env . flip (runReaderT . unServerM) (clientMsgChan,st)) liftIO - - let onExit = [stopReactorLoop, exit] + let onExit = [void $ tryPutMVar reactorStopSignal ()] pure MkSetup {doInitialize, staticHandlers, interpretHandler, onExit} handleInit - :: Recorder (WithPriority Log) - -> FilePath -- ^ root directory, see Note [Root Directory] - -> (FilePath -> IO FilePath) - -> (LSP.LanguageContextEnv config -> FilePath -> WithHieDb -> ThreadQueue -> IO IdeState) - -> MVar () - -> IO () - -> (SomeLspId -> IO ()) - -> (SomeLspId -> IO ()) - -> Chan ReactorMessage + :: InitializationContext config -> LSP.LanguageContextEnv config -> TRequestMessage Method_Initialize -> IO (Either err (LSP.LanguageContextEnv config, IdeState)) -handleInit recorder defaultRoot getHieDbLoc getIdeState lifetime exitClientMsg clearReqId waitForCancel clientMsgChan env (TRequestMessage _ _ m params) = otTracedHandler "Initialize" (show m) $ \sp -> do +handleInit initParams env (TRequestMessage _ _ m params) = otTracedHandler "Initialize" (show m) $ \sp -> do traceWithSpan sp params - -- only shift if lsp root is different from the rootDir - -- see Note [Root Directory] + -- only shift if lsp root is different from the rootDir + -- see Note [Root Directory] + let + recorder = ctxRecorder initParams + defaultRoot = ctxDefaultRoot initParams + untilReactorStopSignal = ctxUntilReactorStopSignal initParams + lifetimeConfirm = ctxconfirmReactorShutdown initParams root <- case LSP.resRootPath env of - Just lspRoot | lspRoot /= defaultRoot -> setCurrentDirectory lspRoot >> return lspRoot - _ -> pure defaultRoot - dbLoc <- getHieDbLoc root + Just lspRoot | lspRoot /= defaultRoot -> setCurrentDirectory lspRoot >> return lspRoot + _ -> pure defaultRoot + dbLoc <- ctxGetHieDbLoc initParams root let initConfig = parseConfiguration params logWith recorder Info $ LogRegisteringIdeConfig initConfig - dbMVar <- newEmptyMVar - - - let handleServerException (Left e) = do - logWith recorder Error $ LogReactorThreadException e - exitClientMsg - handleServerException (Right _) = pure () + ideMVar <- newEmptyMVar + + let handleServerExceptionOrShutDown me = do + -- shutdown shake + tryReadMVar ideMVar >>= mapM_ shutdown + case me of + Left e -> do + lifetimeConfirm "due to exception in reactor thread" + logWith recorder Error $ LogReactorThreadException e + ctxForceShutdown initParams + _ -> do + lifetimeConfirm "due to shutdown message" + return () exceptionInHandler e = do logWith recorder Error $ LogReactorMessageActionException e @@ -235,13 +313,13 @@ handleInit recorder defaultRoot getHieDbLoc getIdeState lifetime exitClientMsg c checkCancelled :: forall m . 
LspId m -> IO () -> (TResponseError m -> IO ()) -> IO ()
 checkCancelled _id act k =
 let sid = SomeLspId _id
- in flip finally (clearReqId sid) $
+ in flip finally (ctxClearReqId initParams sid) $
 catch (do
 -- We could optimize this by first checking if the id
 -- is in the cancelled set. However, this is unlikely to be a
 -- bottleneck and the additional check might hide
 -- issues with async exceptions that need to be fixed.
- cancelOrRes <- race (waitForCancel sid) act
+ cancelOrRes <- race (ctxWaitForCancel initParams sid) act
 case cancelOrRes of
 Left () -> do
 logWith recorder Debug $ LogCancelledRequest sid
@@ -250,20 +328,22 @@ handleInit recorder defaultRoot getHieDbLoc getIdeState lifetime exitClientMsg c
 ) $ \(e :: SomeException) -> do
 exceptionInHandler e
 k $ TResponseError (InR ErrorCodes_InternalError) (T.pack $ show e) Nothing
- _ <- flip forkFinally handleServerException $ do
- untilMVar lifetime $ runWithWorkerThreads (cmapWithPrio LogSession recorder) dbLoc $ \withHieDb' threadQueue' -> do
- putMVar dbMVar (WithHieDbShield withHieDb',threadQueue')
- forever $ do
- msg <- readChan clientMsgChan
- -- We dispatch notifications synchronously and requests asynchronously
- -- This is to ensure that all file edits and config changes are applied before a request is handled
- case msg of
- ReactorNotification act -> handle exceptionInHandler act
- ReactorRequest _id act k -> void $ async $ checkCancelled _id act k
- logWith recorder Info LogReactorThreadStopped
-
- (WithHieDbShield withHieDb, threadQueue) <- takeMVar dbMVar
- ide <- getIdeState env root withHieDb threadQueue
+ _ <- flip forkFinally handleServerExceptionOrShutDown $ do
+ runWithWorkerThreads (cmapWithPrio LogSession recorder) dbLoc $ \withHieDb' threadQueue' ->
+ do
+ ide <- ctxGetIdeState initParams env root withHieDb' threadQueue'
+ putMVar ideMVar ide
+ -- We might be blocked indefinitely at initialization if the reactor stop signal is posted
+ -- before we putMVar.
+ untilReactorStopSignal $ forever $ do
+ msg <- readChan $ ctxClientMsgChan initParams
+ -- We dispatch notifications synchronously and requests asynchronously
+ -- This is to ensure that all file edits and config changes are applied before a request is handled
+ case msg of
+ ReactorNotification act -> handle exceptionInHandler act
+ ReactorRequest _id act k -> void $ async $ checkCancelled _id act k
+
+ ide <- readMVar ideMVar
 registerIdeConfiguration (shakeExtras ide) initConfig
 pure $ Right (env,ide)
@@ -273,9 +353,9 @@ handleInit recorder defaultRoot getHieDbLoc getIdeState lifetime exitClientMsg c
 -- see Note [Serializing runs in separate thread]
 runWithWorkerThreads :: Recorder (WithPriority Session.Log) -> FilePath -> (WithHieDb -> ThreadQueue -> IO ()) -> IO ()
 runWithWorkerThreads recorder dbLoc f = evalContT $ do
- sessionRestartTQueue <- withWorkerQueue id
- sessionLoaderTQueue <- withWorkerQueue id
 (WithHieDbShield hiedb, threadQueue) <- runWithDb recorder dbLoc
+ sessionRestartTQueue <- withWorkerQueueSimple (logWith (cmapWithPrio Session.LogSessionWorkerThread recorder) Debug) "RestartTQueue"
+ sessionLoaderTQueue <- withWorkerQueueSimple (logWith (cmapWithPrio Session.LogSessionWorkerThread recorder) Debug) "SessionLoaderTQueue"
 liftIO $ f hiedb (ThreadQueue threadQueue sessionRestartTQueue sessionLoaderTQueue)
-- | Runs the action until it ends or until the given MVar is put. 
@@ -286,6 +366,9 @@ runWithWorkerThreads recorder dbLoc f = evalContT $ do untilMVar :: MonadUnliftIO m => MVar () -> m a -> m () untilMVar mvar io = race_ (readMVar mvar) io +untilMVar' :: MonadUnliftIO m => MVar a -> m b -> m (Either a b) +untilMVar' mvar io = race (readMVar mvar) io + cancelHandler :: (SomeLspId -> IO ()) -> LSP.Handlers (ServerM c) cancelHandler cancelRequest = LSP.notificationHandler SMethod_CancelRequest $ \TNotificationMessage{_params=CancelParams{_id}} -> liftIO $ cancelRequest (SomeLspId (toLspId _id)) @@ -294,18 +377,11 @@ cancelHandler cancelRequest = LSP.notificationHandler SMethod_CancelRequest $ \T toLspId (InR y) = IdString y shutdownHandler :: Recorder (WithPriority Log) -> IO () -> LSP.Handlers (ServerM c) -shutdownHandler recorder stopReactor = LSP.requestHandler SMethod_Shutdown $ \_ resp -> do - (_, ide) <- ask - liftIO $ logWith recorder Debug LogServerShutdownMessage - -- stop the reactor to free up the hiedb connection - liftIO stopReactor - -- flush out the Shake session to record a Shake profile if applicable - liftIO $ shakeShut ide +shutdownHandler _recorder requestReactorShutdown = LSP.requestHandler SMethod_Shutdown $ \_ resp -> do + -- stop the reactor to free up the hiedb connection and shut down shake + liftIO requestReactorShutdown resp $ Right Null -exitHandler :: IO () -> LSP.Handlers (ServerM c) -exitHandler exit = LSP.notificationHandler SMethod_Exit $ const $ liftIO exit - modifyOptions :: LSP.Options -> LSP.Options modifyOptions x = x{ LSP.optTextDocumentSync = Just $ tweakTDS origTDS } diff --git a/ghcide/src/Development/IDE/Plugin/Test.hs b/ghcide/src/Development/IDE/Plugin/Test.hs index be03cd5a8a..8c0733b22f 100644 --- a/ghcide/src/Development/IDE/Plugin/Test.hs +++ b/ghcide/src/Development/IDE/Plugin/Test.hs @@ -39,7 +39,7 @@ import Development.IDE.Graph.Database (ShakeDatabase, shakeGetBuildStep, shakeGetCleanKeys) import Development.IDE.Graph.Internal.Types (Result (resultBuilt, resultChanged, resultVisited), - Step) + Step (..)) import qualified Development.IDE.Graph.Internal.Types as Graph import Development.IDE.Types.Action import Development.IDE.Types.HscEnvEq (HscEnvEq (hscEnv)) @@ -140,7 +140,7 @@ getDatabaseKeys :: (Graph.Result -> Step) getDatabaseKeys field db = do keys <- shakeGetCleanKeys db step <- shakeGetBuildStep db - return [ k | (k, res) <- keys, field res == step] + return [ k | (k, res) <- keys, field res == Step step] parseAction :: CI String -> NormalizedFilePath -> Action (Either Text Bool) parseAction "typecheck" fp = Right . isJust <$> use TypeCheck fp diff --git a/ghcide/src/Development/IDE/Types/HscEnvEq.hs b/ghcide/src/Development/IDE/Types/HscEnvEq.hs index 1c2ed1732f..26eb8d5395 100644 --- a/ghcide/src/Development/IDE/Types/HscEnvEq.hs +++ b/ghcide/src/Development/IDE/Types/HscEnvEq.hs @@ -54,7 +54,6 @@ newHscEnvEq :: HscEnv -> IO HscEnvEq newHscEnvEq hscEnv' = do mod_cache <- newIORef emptyInstalledModuleEnv - file_cache <- newIORef M.empty -- This finder cache is for things which are outside of things which are tracked -- by HLS. 
For example, non-home modules, dependent object files etc #if MIN_VERSION_ghc(9,11,0) diff --git a/hls-graph/hls-graph.cabal b/hls-graph/hls-graph.cabal index 5eccb4d75e..d1bbb61c31 100644 --- a/hls-graph/hls-graph.cabal +++ b/hls-graph/hls-graph.cabal @@ -65,11 +65,14 @@ library Development.IDE.Graph.KeyMap Development.IDE.Graph.KeySet Development.IDE.Graph.Rule + Development.IDE.WorkerThread Paths_hls_graph autogen-modules: Paths_hls_graph hs-source-dirs: src build-depends: + , mtl ^>=2.3.1 + , safe-exceptions ^>=0.1.7.4 , aeson , async >=2.0 , base >=4.12 && <5 @@ -92,6 +95,7 @@ library , transformers , unliftio , unordered-containers + , prettyprinter if flag(embed-files) cpp-options: -DFILE_EMBED @@ -129,6 +133,7 @@ test-suite tests -threaded -rtsopts -with-rtsopts=-N -fno-ignore-asserts build-depends: + , transformers ^>=0.6.1.2 , base , extra , hls-graph @@ -138,5 +143,6 @@ test-suite tests , tasty , tasty-hspec >= 1.2 , tasty-rerun + , transformers build-tool-depends: hspec-discover:hspec-discover diff --git a/hls-graph/src/Development/IDE/Graph.hs b/hls-graph/src/Development/IDE/Graph.hs index 81ad3b3dfd..bb973c6130 100644 --- a/hls-graph/src/Development/IDE/Graph.hs +++ b/hls-graph/src/Development/IDE/Graph.hs @@ -5,7 +5,7 @@ module Development.IDE.Graph( Action, action, pattern Key, newKey, renderKey, - actionFinally, actionBracket, actionCatch, actionFork, + actionFinally, actionBracket, actionCatch, -- * Configuration ShakeOptions(shakeAllowRedefineRules, shakeExtra), getShakeExtra, getShakeExtraRules, newShakeExtra, @@ -18,6 +18,7 @@ module Development.IDE.Graph( -- * Actions for inspecting the keys in the database getDirtySet, getKeysAndVisitedAge, + module Development.IDE.Graph.KeyMap, module Development.IDE.Graph.KeySet, ) where diff --git a/hls-graph/src/Development/IDE/Graph/Database.hs b/hls-graph/src/Development/IDE/Graph/Database.hs index 65d946b547..c7b4e6a6be 100644 --- a/hls-graph/src/Development/IDE/Graph/Database.hs +++ b/hls-graph/src/Development/IDE/Graph/Database.hs @@ -1,5 +1,4 @@ module Development.IDE.Graph.Database( - AsyncParentKill(..), ShakeDatabase, ShakeValue, shakeNewDatabase, @@ -9,9 +8,11 @@ module Development.IDE.Graph.Database( shakeGetBuildStep, shakeGetDatabaseKeys, shakeGetDirtySet, - shakeGetCleanKeys, - shakeGetBuildEdges) where + shakeGetCleanKeys + ,shakeGetBuildEdges, + shakeShutDatabase) where import Control.Concurrent.STM.Stats (readTVarIO) +import Control.Exception (SomeException) import Data.Dynamic import Data.Maybe import Development.IDE.Graph.Classes () @@ -22,20 +23,24 @@ import Development.IDE.Graph.Internal.Options import Development.IDE.Graph.Internal.Profile (writeProfile) import Development.IDE.Graph.Internal.Rules import Development.IDE.Graph.Internal.Types +import Development.IDE.WorkerThread (TaskQueue) -- Placeholder to be the 'extra' if the user doesn't set it data NonExportedType = NonExportedType -shakeNewDatabase :: ShakeOptions -> Rules () -> IO ShakeDatabase -shakeNewDatabase opts rules = do +shakeShutDatabase :: ShakeDatabase -> IO () +shakeShutDatabase (ShakeDatabase _ _ db) = shutDatabase db + +shakeNewDatabase :: TaskQueue (IO ()) -> ShakeOptions -> Rules () -> IO ShakeDatabase +shakeNewDatabase que opts rules = do let extra = fromMaybe (toDyn NonExportedType) $ shakeExtra opts (theRules, actions) <- runRules extra rules - db <- newDatabase extra theRules + db <- newDatabase que extra theRules pure $ ShakeDatabase (length actions) actions db -shakeRunDatabase :: ShakeDatabase -> [Action a] -> IO [a] 
-shakeRunDatabase a b = shakeRunDatabaseForKeys Nothing a b (const $ pure ()) +shakeRunDatabase :: ShakeDatabase -> [Action a] -> IO [Either SomeException a] +shakeRunDatabase = shakeRunDatabaseForKeys Nothing -- | Returns the set of dirty keys annotated with their age (in # of builds) shakeGetDirtySet :: ShakeDatabase -> IO [(Key, Int)] @@ -43,9 +48,9 @@ shakeGetDirtySet (ShakeDatabase _ _ db) = Development.IDE.Graph.Internal.Database.getDirtySet db -- | Returns the build number -shakeGetBuildStep :: ShakeDatabase -> IO Step +shakeGetBuildStep :: ShakeDatabase -> IO Int shakeGetBuildStep (ShakeDatabase _ _ db) = do - s <- readTVarIO $ databaseStep db + Step s <- readTVarIO $ databaseStep db return s -- Only valid if we never pull on the results, which we don't @@ -58,12 +63,11 @@ shakeRunDatabaseForKeys -- ^ Set of keys changed since last run. 'Nothing' means everything has changed -> ShakeDatabase -> [Action a] - -> (Database -> IO ()) - -> IO [a] -shakeRunDatabaseForKeys keysChanged (ShakeDatabase lenAs1 as1 db) as2 garbageCollect = do + -> IO [Either SomeException a] +shakeRunDatabaseForKeys keysChanged (ShakeDatabase lenAs1 as1 db) as2 = do incDatabase db keysChanged - garbageCollect db - fmap (drop lenAs1) $ runActions db $ map unvoid as1 ++ as2 + drop lenAs1 <$> runActions db (map unvoid as1 ++ as2) + -- | Given a 'ShakeDatabase', write an HTML profile to the given file about the latest run. shakeProfileDatabase :: ShakeDatabase -> FilePath -> IO () diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Action.hs b/hls-graph/src/Development/IDE/Graph/Internal/Action.hs index 30ef078ffe..ce1e7e432d 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Action.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Action.hs @@ -2,7 +2,6 @@ module Development.IDE.Graph.Internal.Action ( ShakeValue -, actionFork , actionBracket , actionCatch , actionFinally @@ -14,6 +13,7 @@ module Development.IDE.Graph.Internal.Action , runActions , Development.IDE.Graph.Internal.Action.getDirtySet , getKeysAndVisitedAge +, runActionInDb ) where import Control.Concurrent.Async @@ -31,6 +31,9 @@ import Development.IDE.Graph.Internal.Key import Development.IDE.Graph.Internal.Rules (RuleResult) import Development.IDE.Graph.Internal.Types import System.Exit +import UnliftIO (atomically, + newEmptyTMVarIO, + putTMVar, readTMVar) type ShakeValue a = (Show a, Typeable a, Eq a, Hashable a, NFData a) @@ -40,45 +43,36 @@ alwaysRerun = do ref <- Action $ asks actionDeps liftIO $ modifyIORef' ref (AlwaysRerunDeps mempty <>) -parallel :: [Action a] -> Action [a] -parallel [] = pure [] -parallel [x] = fmap (:[]) x +parallel :: [Action a] -> Action [Either SomeException a] +parallel [] = return [] parallel xs = do a <- Action ask deps <- liftIO $ readIORef $ actionDeps a + case deps of UnknownDeps -> -- if we are already in the rerun mode, nothing we do is going to impact our state - liftIO $ mapConcurrently (ignoreState a) xs - deps -> do - (newDeps, res) <- liftIO $ unzip <$> mapConcurrently (usingState a) xs - liftIO $ writeIORef (actionDeps a) $ mconcat $ deps : newDeps - pure res - where - usingState a x = do - ref <- newIORef mempty - res <- runReaderT (fromAction x) a{actionDeps=ref} - deps <- readIORef ref - pure (deps, res) + runActionInDb xs + deps -> error $ "parallel not supported when we have precise dependencies: " ++ show deps + -- (newDeps, res) <- liftIO $ unzip <$> runActionInDb usingState xs + -- liftIO $ writeIORef (actionDeps a) $ mconcat $ deps : newDeps + -- return () + +runActionInDb :: 
[Action a] -> Action [Either SomeException a] +runActionInDb acts = do + a <- Action ask + xs <- mapM (\x -> do + barrier <- newEmptyTMVarIO + return (x, barrier)) acts + liftIO $ atomically $ runInDataBase (actionDatabase a) (map (\(x, b) -> (ignoreState a x >>= (atomically . putTMVar b . Right), atomically . putTMVar b . Left)) xs) + results <- liftIO $ mapM (atomically . readTMVar) $ fmap snd xs + return results ignoreState :: SAction -> Action b -> IO b ignoreState a x = do ref <- newIORef mempty runReaderT (fromAction x) a{actionDeps=ref} -actionFork :: Action a -> (Async a -> Action b) -> Action b -actionFork act k = do - a <- Action ask - deps <- liftIO $ readIORef $ actionDeps a - let db = actionDatabase a - case deps of - UnknownDeps -> do - -- if we are already in the rerun mode, nothing we do is going to impact our state - [res] <- liftIO $ withAsync (ignoreState a act) $ \as -> runActions db [k as] - return res - _ -> - error "please help me" - isAsyncException :: SomeException -> Bool isAsyncException e | Just (_ :: SomeAsyncException) <- fromException e = True @@ -130,7 +124,7 @@ applyWithoutDependency ks = do (_, vs) <- liftIO $ build db stack ks pure vs -runActions :: Database -> [Action a] -> IO [a] +runActions :: Database -> [Action a] -> IO [Either SomeException a] runActions db xs = do deps <- newIORef mempty runReaderT (fromAction $ parallel xs) $ SAction db deps emptyStack diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs index 0d27b73a11..83ded5168d 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs @@ -8,13 +8,11 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} -module Development.IDE.Graph.Internal.Database (compute, newDatabase, incDatabase, build, getDirtySet, getKeysAndVisitAge, AsyncParentKill(..), garbageCollectKeys, garbageCollectKeys1) where +module Development.IDE.Graph.Internal.Database (compute, newDatabase, incDatabase, build, getDirtySet, getKeysAndVisitAge, AsyncParentKill(..)) where import Prelude hiding (unzip) -import Control.Concurrent.Async -import Control.Concurrent.Extra -import Control.Concurrent.STM.Stats (STM, TVar, atomically, +import Control.Concurrent.STM.Stats (STM, atomically, atomicallyNamed, modifyTVar', newTVarIO, readTVar, readTVarIO, @@ -31,7 +29,7 @@ import Data.IORef.Extra import Data.Maybe import Data.Traversable (for) import Data.Tuple.Extra -import Debug.Trace (traceM) +import Debug.Trace (traceEvent) import Development.IDE.Graph.Classes import Development.IDE.Graph.Internal.Key import Development.IDE.Graph.Internal.Rules @@ -39,102 +37,40 @@ import Development.IDE.Graph.Internal.Types import qualified Focus import qualified ListT import qualified StmContainers.Map as SMap -import qualified StmContainers.Set as SSet -import System.Time.Extra (duration, sleep) -import UnliftIO (MonadUnliftIO (withRunInIO)) -import qualified UnliftIO.Exception as UE +import System.Time.Extra (duration) #if MIN_VERSION_base(4,19,0) import Data.Functor (unzip) #else import Data.List.NonEmpty (unzip) #endif +import Development.IDE.WorkerThread (TaskQueue) -newDatabase :: Dynamic -> TheRules -> IO Database -newDatabase databaseExtra databaseRules = do +newDatabase :: TaskQueue (IO ()) -> Dynamic -> TheRules -> IO Database +newDatabase databaseQueue databaseExtra databaseRules = do databaseStep <- newTVarIO $ Step 0 + databaseThreads <- newTVarIO [] databaseValues <- 
atomically SMap.new - databaseDirtyKeys <- atomically SSet.new pure Database{..} -garbageCollectKeys1 :: Database -> (Key -> Bool) -> ([Key] -> STM ()) -> IO [Key] -garbageCollectKeys1 db pred garbageCollectHook = do - -- GC policy: - -- We find a list of keys that are FOI and their dependencies, - -- and mark them as "needed". Then we delete all dirty keys not marked as needed. - let maxAge = 0 -- builds; tune as needed or make configurable upstream - -- on idle but still dirty keys - ks <- getKeysAndVisitAge db - let foiks = [ k | (k, _) <- ks, pred k ] - toKeep <- atomically $ transitiveSet db foiks - dirtyWithAge <- Development.IDE.Graph.Internal.Database.getDirtySet db - let victims = [k | (k, age) <- dirtyWithAge - , age >= maxAge - , not (k `memberKeySet` toKeep)] - unless (null victims) $ do - -- Delete victim keys and remove them from the dirty set - atomically $ do - forM_ victims $ \k -> do - SMap.focus cleanupDirty k (databaseValues db) - -- Remove the victim keys from reverse-dependency sets of remaining keys - let list = SMap.listT (databaseValues db) - ListT.traverse_ (\(k', _) -> - SMap.focus (Focus.adjust (onKeyReverseDeps (\ks -> foldr deleteKeySet ks victims))) k' (databaseValues db) - ) list - garbageCollectHook victims - pure () - return victims - -garbageCollectKeys :: Database -> (Key -> Bool) -> ([Key] -> STM ()) -> IO [Key] -garbageCollectKeys db pred garbageCollectHook = do - -- GC policy: - -- - Select dirty keys whose age >= maxAge and that satisfy the given predicate 'pred'. - -- - For each selected key (a victim), drop its previous result by setting its status to Dirty Nothing - -- and remove that key from every other key's reverse-dependency set. - -- - Finally, run the provided 'garbageCollectHook victims' within the same STM transaction. - let maxAge = 0 -- builds; tune as needed or make configurable upstream - -- on idle but still dirty keys - dirtyWithAge <- Development.IDE.Graph.Internal.Database.getDirtySet db - let victims = [k | (k, age) <- dirtyWithAge, age >= maxAge, pred k] - unless (null victims) $ do - -- Delete victim keys and remove them from the dirty set - atomically $ do - forM_ victims $ \k -> do - SMap.focus cleanupDirty k (databaseValues db) - -- Remove the victim keys from reverse-dependency sets of remaining keys - let list = SMap.listT (databaseValues db) - ListT.traverse_ (\(k', _) -> - SMap.focus (Focus.adjust (onKeyReverseDeps (\ks -> foldr deleteKeySet ks victims))) k' (databaseValues db) - ) list - garbageCollectHook victims - pure () - return victims - - -cleanupDirty :: Monad m => Focus.Focus KeyDetails m () -cleanupDirty = Focus.adjust $ \(KeyDetails status rdeps) -> - let status' - | Dirty _ <- status = Dirty Nothing - | otherwise = status - in KeyDetails status' rdeps - -- | Increment the step and mark dirty. 
-- Assumes that the database is not running a build incDatabase :: Database -> Maybe [Key] -> IO () -- only some keys are dirty incDatabase db (Just kk) = do - atomicallyNamed "incDatabase" $ do - modifyTVar' (databaseStep db) $ \(Step i) -> Step $ i + 1 - for_ kk $ \k -> SSet.insert k (databaseDirtyKeys db) - keys <- ListT.toList $ SSet.listT (databaseDirtyKeys db) - transitiveDirtyKeys <- transitiveDirtySet db keys - for_ (toListKeySet transitiveDirtyKeys) $ \k -> SMap.focus updateDirty k (databaseValues db) - + atomicallyNamed "incDatabase" $ modifyTVar' (databaseStep db) $ \(Step i) -> Step $ i + 1 + transitiveDirtyKeys <- transitiveDirtySet db kk + for_ (toListKeySet transitiveDirtyKeys) $ \k -> + -- Updating all the keys atomically is not necessary + -- since we assume that no build is mutating the db. + -- Therefore run one transaction per key to minimise contention. + atomicallyNamed "incDatabase" $ SMap.focus updateDirty k (databaseValues db) -- all keys are dirty incDatabase db Nothing = do atomically $ modifyTVar' (databaseStep db) $ \(Step i) -> Step $ i + 1 let list = SMap.listT (databaseValues db) + -- all running keys are also dirty atomicallyNamed "incDatabase - all " $ flip ListT.traverse_ list $ \(k,_) -> SMap.focus updateDirty k (databaseValues db) @@ -152,51 +88,54 @@ build -- build _ st k | traceShow ("build", st, k) False = undefined build db stack keys = do step <- readTVarIO $ databaseStep db - !built <- runAIO step $ builder db stack (fmap newKey keys) - let (ids, vs) = unzip built - pure (ids, fmap (asV . resultValue) vs) + go `catch` \e@(AsyncParentKill i s) -> do + if s == step + then throw e + else throw $ AsyncParentKill i $ Step (-1) where - asV :: Value -> value - asV (Value x) = unwrapDynamic x + go = do + -- step <- readTVarIO $ databaseStep db + -- built <- mapConcurrently (builderOne db stack) (fmap newKey keys) + built <- builder db stack (fmap newKey keys) + let (ids, vs) = unzip built + pure (ids, fmap (asV . resultValue) vs) + where + asV :: Value -> value + asV (Value x) = unwrapDynamic x + -- | Build a list of keys and return their results. -- If none of the keys are dirty, we can return the results immediately. -- Otherwise, a blocking computation is returned *which must be evaluated asynchronously* to avoid deadlock. 
-builder :: (Traversable f) => Database -> Stack -> f Key -> AIO (f (Key, Result)) +builder :: (Traversable f) => Database -> Stack -> f Key -> IO (f (Key, Result)) -- builder _ st kk | traceShow ("builder", st,kk) False = undefined -builder db stack keys = do - keyWaits <- for keys $ \k -> builderOne db stack k - !res <- for keyWaits $ \(k, waitR) -> do - !v<- liftIO waitR - return (k, v) - return res +builder db stack keys = for keys $ \k -> builderOne db stack k -builderOne :: Database -> Stack -> Key -> AIO (Key, IO Result) -builderOne db@Database {..} stack id = UE.mask $ \restore -> do - current <- liftIO $ readTVarIO databaseStep - (k, registerWaitResult) <- liftIO $ atomicallyNamed "builder" $ do +builderOne :: Database -> Stack -> Key -> IO (Key, Result) +builderOne db@Database {..} stack id = do + traceEvent ("builderOne: " ++ show id) return () + res <- liftIO $ atomicallyNamed "builder" $ do -- Spawn the id if needed status <- SMap.lookup id databaseValues - val <- - let refreshRsult s = do - let act = - restore $ asyncWithCleanUp $ - refresh db stack id s - `UE.onException` (UE.uninterruptibleMask_ $ liftIO (atomicallyNamed "builder - onException" (SMap.focus updateDirty id databaseValues))) + current <- readTVar databaseStep + + val <- case viewDirty current $ maybe (Dirty Nothing) keyStatus status of + Dirty s -> do + SMap.focus (updateStatus $ Running current s) id databaseValues + traceEvent ("Starting build of key: " ++ show id ++ ", step "++ show current) + $ runOneInDataBase db (refresh db stack id s) $ \e -> atomically $ SMap.focus (updateStatus $ Exception current e s) id databaseValues + return Nothing + Clean r -> return $ Just r + -- force here might contains async exceptions from previous runs + Running _step _s + | memberStack id stack -> throw $ StackException stack + | otherwise -> retry + Exception _ e _s -> throw e + pure val + case res of + Just r -> return (id, r) + Nothing -> builderOne db stack id - SMap.focus (updateStatus $ Running current s) id databaseValues - return act - in case viewDirty current $ maybe (Dirty Nothing) keyStatus status of - Dirty mbr -> refreshRsult mbr - Running step _mbr - | step /= current -> error $ "Inconsistent database state: key " ++ show id ++ " is marked Running at step " ++ show step ++ " but current step is " ++ show current - | memberStack id stack -> throw $ StackException stack - | otherwise -> retry - Clean r -> pure . pure . pure $ r - -- force here might contains async exceptions from previous runs - pure (id, val) - waitR <- registerWaitResult - return (k, waitR) -- | isDirty -- only dirty when it's build time is older than the changed time of one of its dependencies isDirty :: Foldable t => Result -> t (a, Result) -> Bool @@ -209,30 +148,27 @@ isDirty me = any (\(_,dep) -> resultBuilt me < resultChanged dep) -- * If no dirty dependencies and we have evaluated the key previously, then we refresh it in the current thread. 
-- This assumes that the implementation will be a lookup -- * Otherwise, we spawn a new thread to refresh the dirty deps (if any) and the key itself -refreshDeps :: KeySet -> Database -> Stack -> Key -> Result -> [KeySet] -> AIO Result +refreshDeps :: KeySet -> Database -> Stack -> Key -> Result -> [KeySet] -> IO Result refreshDeps visited db stack key result = \case -- no more deps to refresh - [] -> compute' db stack key RunDependenciesSame (Just result) + [] -> compute db stack key RunDependenciesSame (Just result) (dep:deps) -> do let newVisited = dep <> visited res <- builder db stack (toListKeySet (dep `differenceKeySet` visited)) if isDirty result res -- restart the computation if any of the deps are dirty - then compute' db stack key RunDependenciesChanged (Just result) + then compute db stack key RunDependenciesChanged (Just result) -- else kick the rest of the deps else refreshDeps newVisited db stack key result deps -- refresh :: Database -> Stack -> Key -> Maybe Result -> IO Result -- refresh _ st k _ | traceShow ("refresh", st, k) False = undefined -refresh :: Database -> Stack -> Key -> Maybe Result -> AIO Result +refresh :: Database -> Stack -> Key -> Maybe Result -> IO Result refresh db stack key result = case (addStack key stack, result) of (Left e, _) -> throw e (Right stack, Just me@Result{resultDeps = ResultDeps deps}) -> refreshDeps mempty db stack key me (reverse deps) - (Right stack, _) -> compute' db stack key RunDependenciesChanged result - -compute' :: Database -> Stack -> Key -> RunMode -> Maybe Result -> AIO Result -compute' db stack key mode result = liftIO $ compute db stack key mode result + (Right stack, _) -> compute db stack key RunDependenciesChanged result -- | Compute a key. compute :: Database -> Stack -> Key -> RunMode -> Maybe Result -> IO Result -- compute _ st k _ _ | traceShow ("compute", st, k) False = undefined @@ -273,7 +209,6 @@ compute db@Database{..} stack key mode result = do liftIO $ atomicallyNamed "compute and run hook" $ do runHook SMap.focus (updateStatus $ Clean res) key databaseValues - SSet.delete key databaseDirtyKeys pure res updateStatus :: Monad m => Status -> Focus.Focus KeyDetails m () @@ -328,97 +263,15 @@ updateReverseDeps myId db prev new = do getReverseDependencies :: Database -> Key -> STM (Maybe KeySet) getReverseDependencies db = (fmap.fmap) keyReverseDeps . flip SMap.lookup (databaseValues db) -transitiveDirtySet :: Foldable t => Database -> t Key -> STM KeySet +transitiveDirtySet :: Foldable t => Database -> t Key -> IO KeySet transitiveDirtySet database = flip State.execStateT mempty . traverse_ loop where loop x = do seen <- State.get if x `memberKeySet` seen then pure () else do State.put (insertKeySet x seen) - next <- lift $ getReverseDependencies database x + next <- lift $ atomically $ getReverseDependencies database x traverse_ loop (maybe mempty toListKeySet next) -getDependencies :: Database -> Key -> STM (Maybe KeySet) -getDependencies db k = do - m <- SMap.lookup k (databaseValues db) - pure $ do - KeyDetails st _ <- m - case getDeps st of - UnknownDeps -> Nothing - rd -> Just (getResultDepsDefault mempty rd) - -transitiveSet :: Foldable t => Database -> t Key -> STM KeySet -transitiveSet database = flip State.execStateT mempty . 
traverse_ loop - where - loop x = do - seen <- State.get - if x `memberKeySet` seen then pure () else do - State.put (insertKeySet x seen) - next <- lift $ getDependencies database x - traverse_ loop (maybe mempty toListKeySet next) - --------------------------------------------------------------------------------- --- Asynchronous computations with cancellation - --- | A simple monad to implement cancellation on top of 'Async', --- generalizing 'withAsync' to monadic scopes. -newtype AIO a = AIO { unAIO :: ReaderT (TVar [Async ()]) IO a } - deriving newtype (Applicative, Functor, Monad, MonadIO) - -data AsyncParentKill = AsyncParentKill ThreadId Step - deriving (Show, Eq) - -instance Exception AsyncParentKill where - toException = asyncExceptionToException - fromException = asyncExceptionFromException - --- | Run the monadic computation, cancelling all the spawned asyncs if an exception arises -runAIO :: Step -> AIO a -> IO a -runAIO s (AIO act) = do - asyncsRef <- newTVarIO [] - -- Log the exact exception (including async exceptions) before cleanup, - -- then rethrow to preserve previous semantics. - runReaderT act asyncsRef `onException` do - asyncs <- atomically $ do - r <- readTVar asyncsRef - modifyTVar' asyncsRef $ const [] - return r - tid <- myThreadId - cleanupAsync asyncs tid s - --- | Like 'async' but with built-in cancellation. --- Returns an IO action to wait on the result. -asyncWithCleanUp :: AIO a -> AIO (IO a) -asyncWithCleanUp act = do - st <- AIO ask - io <- unliftAIO act - -- mask to make sure we keep track of the spawned async - liftIO $ uninterruptibleMask $ \restore -> do - a <- async $ restore io - atomically $ modifyTVar' st (void a :) - return $ wait a - -unliftAIO :: AIO a -> AIO (IO a) -unliftAIO act = do - st <- AIO ask - return $ runReaderT (unAIO act) st -instance MonadUnliftIO AIO where - withRunInIO k = do - st <- AIO ask - liftIO $ k (\aio -> runReaderT (unAIO aio) st) -cleanupAsync :: [Async a] -> ThreadId -> Step -> IO () --- mask to make sure we interrupt all the asyncs -cleanupAsync asyncs tid step = uninterruptibleMask $ \unmask -> do - -- interrupt all the asyncs without waiting - -- mapM_ (\a -> throwTo (asyncThreadId a) AsyncCancelled) asyncs - mapM_ (\a -> throwTo (asyncThreadId a) $ AsyncParentKill tid step) asyncs - -- Wait until all the asyncs are done - -- But if it takes more than 10 seconds, log to stderr - unless (null asyncs) $ do - let warnIfTakingTooLong = unmask $ forever $ do - sleep 10 - traceM "cleanupAsync: waiting for asyncs to finish" - withAsync warnIfTakingTooLong $ \_ -> - mapM_ waitCatch asyncs diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs index 03e1f0b657..08b911e765 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs @@ -5,7 +5,8 @@ module Development.IDE.Graph.Internal.Types where -import Control.Concurrent.STM (STM) +import Control.Concurrent.STM (STM, modifyTVar') +import Control.Monad (forever, unless) import Control.Monad.Catch import Control.Monad.IO.Class import Control.Monad.Trans.Reader @@ -19,16 +20,25 @@ import Data.IORef import Data.List (intercalate) import Data.Maybe import Data.Typeable +import Debug.Trace (traceEventIO, traceM) import Development.IDE.Graph.Classes import Development.IDE.Graph.Internal.Key +import Development.IDE.WorkerThread (TaskQueue, + awaitRunInThreadStmInNewThreads) import GHC.Conc (TVar, atomically) import GHC.Generics (Generic) import 
qualified ListT import qualified StmContainers.Map as SMap import StmContainers.Map (Map) -import StmContainers.Set (Set) -import System.Time.Extra (Seconds) -import UnliftIO (MonadUnliftIO) +import System.Time.Extra (Seconds, sleep) +import UnliftIO (Async (asyncThreadId), + MonadUnliftIO, + asyncExceptionFromException, + asyncExceptionToException, + readTVar, readTVarIO, + throwTo, waitCatch, + withAsync) +import UnliftIO.Concurrent (ThreadId, myThreadId) #if !MIN_VERSION_base(4,18,0) import Control.Applicative (liftA2) @@ -89,14 +99,16 @@ getDatabase = Action $ asks actionDatabase data ShakeDatabase = ShakeDatabase !Int [Action ()] Database newtype Step = Step Int - deriving newtype (Eq,Ord,Hashable,Show,Num,Enum,Real,Integral) - ---------------------------------------------------------------------- --- Keys - + deriving newtype (Eq,Ord,Hashable,Show) +getShakeStep :: MonadIO m => ShakeDatabase -> m Step +getShakeStep (ShakeDatabase _ _ db) = do + s <- readTVarIO $ databaseStep db + return s +--------------------------------------------------------------------- +-- Keys newtype Value = Value Dynamic data KeyDetails = KeyDetails { @@ -109,15 +121,56 @@ onKeyReverseDeps f it@KeyDetails{..} = it{keyReverseDeps = f keyReverseDeps} data Database = Database { - databaseExtra :: Dynamic, - databaseRules :: TheRules, - databaseStep :: !(TVar Step), - databaseValues :: !(Map Key KeyDetails), - databaseDirtyKeys :: !(Set Key) - -- ^ The set of dirty keys, which are the keys that have been marked as dirty - -- by the client, it would be removed once the target key is marked as clean. + databaseExtra :: Dynamic, + + databaseThreads :: TVar [Async ()], + databaseQueue :: TaskQueue (IO ()), + + databaseRules :: TheRules, + databaseStep :: !(TVar Step), + databaseValues :: !(Map Key KeyDetails) } +runInDataBase :: Database -> [(IO result, SomeException -> IO ())] -> STM () +runInDataBase db acts = do + s <- getDataBaseStepInt db + awaitRunInThreadStmInNewThreads (getDataBaseStepInt db) s (databaseQueue db) (databaseThreads db) acts + +runOneInDataBase :: Database -> IO result -> (SomeException -> IO ()) -> STM () +runOneInDataBase db act handler = do + s <- getDataBaseStepInt db + awaitRunInThreadStmInNewThreads (getDataBaseStepInt db) s (databaseQueue db) (databaseThreads db) [(act, handler)] + +getDataBaseStepInt :: Database -> STM Int +getDataBaseStepInt db = do + Step s <- readTVar $ databaseStep db + return s + +data AsyncParentKill = AsyncParentKill ThreadId Step + deriving (Show, Eq) + +instance Exception AsyncParentKill where + toException = asyncExceptionToException + fromException = asyncExceptionFromException + +shutDatabase :: Database -> IO () +shutDatabase Database{..} = uninterruptibleMask $ \unmask -> do + -- wait for all threads to finish + asyncs <- readTVarIO databaseThreads + step <- readTVarIO databaseStep + tid <- myThreadId + traceEventIO ("shutDatabase: cancelling " ++ show (length asyncs) ++ " asyncs, step " ++ show step) + mapM_ (\a -> throwTo (asyncThreadId a) $ AsyncParentKill tid step) asyncs + atomically $ modifyTVar' databaseThreads (const []) + -- Wait until all the asyncs are done + -- But if it takes more than 10 seconds, log to stderr + unless (null asyncs) $ do + let warnIfTakingTooLong = unmask $ forever $ do + sleep 10 + traceM "cleanupAsync: waiting for asyncs to finish" + withAsync warnIfTakingTooLong $ \_ -> + mapM_ waitCatch asyncs + -- waitForDatabaseRunningKeys :: Database -> IO () -- waitForDatabaseRunningKeys = getDatabaseValues >=> mapM_ (waitRunning . 
snd) @@ -131,6 +184,7 @@ getDatabaseValues = atomically data Status = Clean !Result | Dirty (Maybe Result) + | Exception !Step !SomeException !(Maybe Result) | Running { runningStep :: !Step, -- runningWait :: !(IO ()), @@ -140,18 +194,14 @@ data Status viewDirty :: Step -> Status -> Status viewDirty currentStep (Running s re) | currentStep /= s = Dirty re +viewDirty currentStep (Exception s _ re) | currentStep /= s = Dirty re viewDirty _ other = other getResult :: Status -> Maybe Result -getResult (Clean re) = Just re -getResult (Dirty m_re) = m_re -getResult (Running _ m_re) = m_re -- watch out: this returns the previous result - -getDeps :: Status -> ResultDeps -getDeps (Clean re) = resultDeps re -getDeps (Dirty (Just re)) = resultDeps re -getDeps (Dirty Nothing) = UnknownDeps -getDeps (Running _ re) = maybe mempty resultDeps re +getResult (Clean re) = Just re +getResult (Dirty m_re) = m_re +getResult (Running _ m_re) = m_re -- watch out: this returns the previous result +getResult (Exception _ _ m_re) = m_re -- waitRunning :: Status -> IO () -- waitRunning Running{..} = runningWait diff --git a/hls-graph/src/Development/IDE/WorkerThread.hs b/hls-graph/src/Development/IDE/WorkerThread.hs new file mode 100644 index 0000000000..2f496f6cff --- /dev/null +++ b/hls-graph/src/Development/IDE/WorkerThread.hs @@ -0,0 +1,164 @@ +{- +Module : Development.IDE.WorkerThread +Author : @soulomoon +SPDX-License-Identifier: Apache-2.0 + +Description : This module provides an API for managing worker threads in the IDE. +see Note [Serializing runs in separate thread] +-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} + +module Development.IDE.WorkerThread + ( LogWorkerThread (..), + withWorkerQueue, + awaitRunInThread, + TaskQueue, + writeTaskQueue, + withWorkerQueueSimple, + awaitRunInThreadStm, + awaitRunInThreadStmInNewThread, + awaitRunInThreadStmInNewThreads, + isEmptyTaskQueue + ) where + +import Control.Concurrent.Async (Async, async, withAsync) +import Control.Concurrent.STM +import Control.Exception.Safe (MonadMask (..), + SomeException (SomeException), + finally, throw, try) +import Control.Monad.Cont (ContT (ContT)) +import qualified Data.Text as T + +import Control.Concurrent +import Control.Exception (catch) +import Control.Monad (void, when) +import Prettyprinter + +data LogWorkerThread + = LogThreadEnding !T.Text + | LogThreadEnded !T.Text + | LogSingleWorkStarting !T.Text + | LogSingleWorkEnded !T.Text + | LogMainThreadId !T.Text !ThreadId + deriving (Show) + +instance Pretty LogWorkerThread where + pretty = \case + LogThreadEnding t -> "Worker thread ending:" <+> pretty t + LogThreadEnded t -> "Worker thread ended:" <+> pretty t + LogSingleWorkStarting t -> "Worker starting a unit of work: " <+> pretty t + LogSingleWorkEnded t -> "Worker ended a unit of work: " <+> pretty t + LogMainThreadId t tid -> "Main thread for" <+> pretty t <+> "is" <+> pretty (show tid) + + +{- +Note [Serializing runs in separate thread] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +We often want to take long-running actions using some resource that cannot be shared. +In this instance it is useful to have a queue of jobs to run using the resource. +Like the db writes, session loading in session loader, shake session restarts. + +Originally we used various ways to implement this, but it was hard to maintain and error prone. +Moreover, we can not stop these threads uniformly when we are shutting down the server. 
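+
+A rough usage sketch (illustrative only; the no-op logger and the queue title
+are placeholders): allocate the queue inside a 'ContT' scope with
+'withWorkerQueueSimple', then serialize work through it with
+'awaitRunInThread':
+
+    evalContT $ do
+      q <- withWorkerQueueSimple (const (pure ())) "example worker"
+      liftIO $ do
+        r <- awaitRunInThread q (pure (1 + 1)) -- runs on the worker thread
+        print r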
+-} +data TaskQueue a = TaskQueue (TQueue a) +newTaskQueueIO :: IO (TaskQueue a) +newTaskQueueIO = TaskQueue <$> newTQueueIO +data ExitOrTask t = Exit | Task t +type Logger = LogWorkerThread -> IO () + +-- | 'withWorkerQueue' creates a new 'TQueue', and launches a worker +-- thread which polls the queue for requests and runs the given worker +-- function on them. +withWorkerQueueSimple :: Logger -> T.Text -> ContT () IO (TaskQueue (IO ())) +withWorkerQueueSimple log title = withWorkerQueue log title id +withWorkerQueue :: Logger -> T.Text -> (t -> IO ()) -> ContT () IO (TaskQueue t) +withWorkerQueue log title workerAction = ContT $ \mainAction -> do + tid <- myThreadId + log (LogMainThreadId title tid) + q <- newTaskQueueIO + -- Use a TMVar as a stop flag to coordinate graceful shutdown. + -- The worker thread checks this flag before dequeuing each job; if set, it exits immediately, + -- ensuring that no new work is started after shutdown is requested. + -- This mechanism is necessary because some downstream code may swallow async exceptions, + -- making 'cancel' unreliable for stopping the thread in all cases. + -- If 'cancel' does interrupt the thread (e.g., while blocked in STM or in a cooperative job), + -- the thread exits immediately and never checks the TMVar; in such cases, the stop flag is redundant. + b <- newEmptyTMVarIO + withAsync (writerThread q b) $ \_ -> do + mainAction q + -- if we want to debug the exact location the worker swallows an async exception, we can + -- temporarily comment out the `finally` clause. + `finally` atomically (putTMVar b ()) + log (LogThreadEnding title) + log (LogThreadEnded title) + where + -- writerThread :: TaskQueue t -> TMVar () -> (forall a. IO a -> IO a) -> IO () + writerThread q b = + -- See above: check stop flag before dequeuing, exit if set, otherwise run next job. + do + task <- atomically $ do + task <- tryReadTaskQueue q + isEm <- isEmptyTMVar b + case (isEm, task) of + (False, _) -> return Exit -- stop flag set, exit + (_, Just t) -> return $ Task t -- got a task, run it + (_, Nothing) -> retry -- no task, wait + case task of + Exit -> return () + Task t -> do + log $ LogSingleWorkStarting title + workerAction t + log $ LogSingleWorkEnded title + writerThread q b + + +-- | 'awaitRunInThread' queues up an 'IO' action to be run by a worker thread, +-- and then blocks until the result is computed. If the action throws an +-- non-async exception, it is rethrown in the calling thread. +awaitRunInThreadStm :: TaskQueue (IO ()) -> IO result -> STM result +awaitRunInThreadStm (TaskQueue q) act = do + barrier <- newEmptyTMVar + -- Take an action from TQueue, run it and + -- use barrier to wait for the result + writeTQueue q (try act >>= atomically . 
putTMVar barrier) + resultOrException <- takeTMVar barrier + case resultOrException of + Left e -> throw (e :: SomeException) + Right r -> return r + +awaitRunInThreadStmInNewThread :: STM Int -> Int -> TaskQueue (IO ()) -> TVar [Async ()] -> IO result -> (SomeException -> IO ()) -> STM () +awaitRunInThreadStmInNewThread getStep deliverStep q tthreads act handler = awaitRunInThreadStmInNewThreads getStep deliverStep q tthreads [(act, handler)] + +awaitRunInThreadStmInNewThreads :: STM Int -> Int -> TaskQueue (IO ()) -> TVar [Async ()] -> [(IO result, SomeException -> IO ())] -> STM () +awaitRunInThreadStmInNewThreads getStep deliverStep (TaskQueue q) tthreads acts = do + -- Take an action from TQueue, run it and + -- use barrier to wait for the result + writeTQueue q (uninterruptibleMask $ \restore -> do + curStep <- atomically getStep + when (curStep == deliverStep) $ do + syncs <- mapM (\(act, handler) -> async (restore (void act `catch` \(SomeException e) -> handler (SomeException e)))) acts + atomically $ modifyTVar' tthreads (syncs++) + ) + +awaitRunInThread :: TaskQueue (IO ()) -> IO result -> IO result +awaitRunInThread (TaskQueue q) act = do + barrier <- newEmptyTMVarIO + -- Take an action from TQueue, run it and + -- use barrier to wait for the result + atomically $ writeTQueue q (try act >>= atomically . putTMVar barrier) + resultOrException <- atomically $ takeTMVar barrier + case resultOrException of + Left e -> throw (e :: SomeException) + Right r -> return r + +writeTaskQueue :: TaskQueue a -> a -> STM () +writeTaskQueue (TaskQueue q) = writeTQueue q + +tryReadTaskQueue :: TaskQueue a -> STM (Maybe a) +tryReadTaskQueue (TaskQueue q) = tryReadTQueue q + +isEmptyTaskQueue :: TaskQueue a -> STM Bool +isEmptyTaskQueue (TaskQueue q) = isEmptyTQueue q + diff --git a/hls-graph/test/ActionSpec.hs b/hls-graph/test/ActionSpec.hs index 3a0b8d6829..826d542e21 100644 --- a/hls-graph/test/ActionSpec.hs +++ b/hls-graph/test/ActionSpec.hs @@ -7,6 +7,7 @@ import Control.Concurrent (MVar, readMVar) import qualified Control.Concurrent as C import Control.Concurrent.STM import Control.Monad.IO.Class (MonadIO (..)) +import Control.Monad.Trans.Cont (evalContT) import Development.IDE.Graph (shakeOptions) import Development.IDE.Graph.Database (shakeNewDatabase, shakeRunDatabase, @@ -15,15 +16,28 @@ import Development.IDE.Graph.Internal.Database (build, incDatabase) import Development.IDE.Graph.Internal.Key import Development.IDE.Graph.Internal.Types import Development.IDE.Graph.Rule +import Development.IDE.WorkerThread (TaskQueue, + withWorkerQueueSimple) import Example import qualified StmContainers.Map as STM import Test.Hspec +itInThread :: String -> (TaskQueue (IO ()) -> IO ()) -> SpecWith () +itInThread name ex = it name $ evalContT $ do + thread <- withWorkerQueueSimple (const $ return ()) "hls-graph test" + liftIO $ ex thread + +shakeRunDatabaseFromRight :: ShakeDatabase -> [Action a] -> IO [a] +shakeRunDatabaseFromRight db as = do + res <- shakeRunDatabase db as + case sequence res of + Left e -> error $ "shakeRunDatabaseFromRight: unexpected exception: " ++ show e + Right v -> return v spec :: Spec spec = do - describe "apply1" $ it "Test build update, Buggy dirty mechanism in hls-graph #4237" $ do + describe "apply1" $ itInThread "Test build update, Buggy dirty mechanism in hls-graph #4237" $ \q -> do let ruleStep1 :: MVar Int -> Rules () ruleStep1 m = addRule $ \CountRule _old mode -> do -- depends on ruleSubBranch, it always changed if dirty @@ -39,68 +53,65 @@ spec = do return $ 
RunResult ChangedNothing "" r (return ()) count <- C.newMVar 0 count1 <- C.newMVar 0 - db <- shakeNewDatabase shakeOptions $ do + db <- shakeNewDatabase q shakeOptions $ do ruleSubBranch count ruleStep1 count1 -- bootstrapping the database - _ <- shakeRunDatabase db $ pure $ apply1 CountRule -- count = 1 + _ <- shakeRunDatabaseFromRight db $ pure $ apply1 CountRule -- count = 1 let child = newKey SubBranchRule let parent = newKey CountRule -- instruct to RunDependenciesChanged then CountRule should be recomputed -- result should be changed 0, build 1 - _res1 <- shakeRunDatabaseForKeys (Just [child]) db [apply1 CountRule] $ \_ -> return () - -- count = 2 + _res1 <- shakeRunDatabaseForKeys (Just [child]) db [apply1 CountRule] -- count = 2 -- since child changed = parent build -- instruct to RunDependenciesSame then CountRule should not be recomputed -- result should be changed 0, build 1 - _res3 <- shakeRunDatabaseForKeys (Just [parent]) db [apply1 CountRule] $ \_ -> return () - -- count = 2 + _res3 <- shakeRunDatabaseForKeys (Just [parent]) db [apply1 CountRule] -- count = 2 -- invariant child changed = parent build should remains after RunDependenciesSame -- this used to be a bug, with additional computation, see https://github.com/haskell/haskell-language-server/pull/4238 - _res3 <- shakeRunDatabaseForKeys (Just [parent]) db [apply1 CountRule] $ \_ -> return () - -- count = 2 + _res3 <- shakeRunDatabaseForKeys (Just [parent]) db [apply1 CountRule] -- count = 2 c1 <- readMVar count1 c1 `shouldBe` 2 - describe "apply1" $ do - it "computes a rule with no dependencies" $ do - db <- shakeNewDatabase shakeOptions ruleUnit - res <- shakeRunDatabase db $ + describe "apply1" $ do + itInThread "computes a rule with no dependencies" $ \q -> do + db <- shakeNewDatabase q shakeOptions ruleUnit + res <- shakeRunDatabaseFromRight db $ pure $ apply1 (Rule @()) res `shouldBe` [()] - it "computes a rule with one dependency" $ do - db <- shakeNewDatabase shakeOptions $ do + itInThread "computes a rule with one dependency" $ \q -> do + db <- shakeNewDatabase q shakeOptions $ do ruleUnit ruleBool - res <- shakeRunDatabase db $ pure $ apply1 Rule + res <- shakeRunDatabaseFromRight db $ pure $ apply1 Rule res `shouldBe` [True] - it "tracks direct dependencies" $ do - db@(ShakeDatabase _ _ theDb) <- shakeNewDatabase shakeOptions $ do + itInThread "tracks direct dependencies" $ \q -> do + db@(ShakeDatabase _ _ theDb) <- shakeNewDatabase q shakeOptions $ do ruleUnit ruleBool let theKey = Rule @Bool - res <- shakeRunDatabase db $ + res <- shakeRunDatabaseFromRight db $ pure $ apply1 theKey res `shouldBe` [True] Just (Clean res) <- lookup (newKey theKey) <$> getDatabaseValues theDb resultDeps res `shouldBe` ResultDeps [singletonKeySet $ newKey (Rule @())] - it "tracks reverse dependencies" $ do - db@(ShakeDatabase _ _ Database {..}) <- shakeNewDatabase shakeOptions $ do + itInThread "tracks reverse dependencies" $ \q -> do + db@(ShakeDatabase _ _ Database {..}) <- shakeNewDatabase q shakeOptions $ do ruleUnit ruleBool let theKey = Rule @Bool - res <- shakeRunDatabase db $ + res <- shakeRunDatabaseFromRight db $ pure $ apply1 theKey res `shouldBe` [True] Just KeyDetails {..} <- atomically $ STM.lookup (newKey (Rule @())) databaseValues keyReverseDeps `shouldBe` singletonKeySet (newKey theKey) - it "rethrows exceptions" $ do - db <- shakeNewDatabase shakeOptions $ addRule $ \(Rule :: Rule ()) _old _mode -> error "boom" - let res = shakeRunDatabase db $ pure $ apply1 (Rule @()) + itInThread "rethrows exceptions" $ \q 
-> do + db <- shakeNewDatabase q shakeOptions $ addRule $ \(Rule :: Rule ()) _old _mode -> error "boom" + let res = shakeRunDatabaseFromRight db $ pure $ apply1 (Rule @()) res `shouldThrow` anyErrorCall - it "computes a rule with branching dependencies does not invoke phantom dependencies #3423" $ do + itInThread "computes a rule with branching dependencies does not invoke phantom dependencies #3423" $ \q -> do cond <- C.newMVar True count <- C.newMVar 0 - (ShakeDatabase _ _ theDb) <- shakeNewDatabase shakeOptions $ do + (ShakeDatabase _ _ theDb) <- shakeNewDatabase q shakeOptions $ do ruleUnit ruleCond cond ruleSubBranch count @@ -119,15 +130,15 @@ spec = do countRes <- build theDb emptyStack [SubBranchRule] snd countRes `shouldBe` [1 :: Int] - describe "applyWithoutDependency" $ it "does not track dependencies" $ do - db@(ShakeDatabase _ _ theDb) <- shakeNewDatabase shakeOptions $ do + describe "applyWithoutDependency" $ itInThread "does not track dependencies" $ \q -> do + db@(ShakeDatabase _ _ theDb) <- shakeNewDatabase q shakeOptions $ do ruleUnit addRule $ \Rule _old _mode -> do [()] <- applyWithoutDependency [Rule] return $ RunResult ChangedRecomputeDiff "" True $ return () let theKey = Rule @Bool - res <- shakeRunDatabase db $ + res <- shakeRunDatabaseFromRight db $ pure $ applyWithoutDependency [theKey] res `shouldBe` [[True]] Just (Clean res) <- lookup (newKey theKey) <$> getDatabaseValues theDb diff --git a/hls-graph/test/DatabaseSpec.hs b/hls-graph/test/DatabaseSpec.hs index 9061bfa89d..64ace32ce5 100644 --- a/hls-graph/test/DatabaseSpec.hs +++ b/hls-graph/test/DatabaseSpec.hs @@ -2,6 +2,13 @@ module DatabaseSpec where +import Control.Exception (Exception (..), + SomeException, + evaluate, throw) +import Control.Monad (join) +import Control.Monad.IO.Class (MonadIO (..)) +import Control.Monad.Trans.Cont (evalContT) +import Debug.Trace (traceM) import Development.IDE.Graph (newKey, shakeOptions) import Development.IDE.Graph.Database (shakeNewDatabase, shakeRunDatabase) @@ -9,28 +16,43 @@ import Development.IDE.Graph.Internal.Action (apply1) import Development.IDE.Graph.Internal.Database (compute, incDatabase) import Development.IDE.Graph.Internal.Rules (addRule) import Development.IDE.Graph.Internal.Types +import Development.IDE.WorkerThread import Example import System.Time.Extra (timeout) import Test.Hspec +itInThread :: String -> (TaskQueue (IO ()) -> IO ()) -> SpecWith () +itInThread name ex = it name $ evalContT $ do + thread <- withWorkerQueueSimple (const $ return ()) "hls-graph test" + liftIO $ ex thread + +exractException :: [Either SomeException ()] -> Maybe StackException +exractException [] = Nothing +exractException (Left e : _) | Just ne@StackException{} <- fromGraphException e = return ne +exractException (_: xs) = exractException xs + + spec :: Spec spec = do describe "Evaluation" $ do - it "detects cycles" $ do - db <- shakeNewDatabase shakeOptions $ do + itInThread "detects cycles" $ \q -> do + db <- shakeNewDatabase q shakeOptions $ do ruleBool addRule $ \Rule _old _mode -> do True <- apply1 (Rule @Bool) return $ RunResult ChangedRecomputeDiff "" () (return ()) - let res = shakeRunDatabase db $ pure $ apply1 (Rule @()) - timeout 1 res `shouldThrow` \StackException{} -> True + res <- timeout 1 $ shakeRunDatabase db $ pure $ apply1 (Rule @()) + let x = exractException =<< res + let throwStack x = case x + of Just e -> throw e + Nothing -> error "Expected a StackException, got none" + throwStack x `shouldThrow` \StackException{} -> True describe "compute" $ do - 
it "build step and changed step updated correctly" $ do - (ShakeDatabase _ _ theDb) <- shakeNewDatabase shakeOptions $ do + itInThread "build step and changed step updated correctly" $ \q -> do + (ShakeDatabase _ _ theDb) <- shakeNewDatabase q shakeOptions $ do ruleStep - let k = newKey $ Rule @() -- ChangedRecomputeSame r1@Result{resultChanged=rc1, resultBuilt=rb1} <- compute theDb emptyStack k RunDependenciesChanged Nothing From 86fff58d7db312060d0ffcd60720c1e2782e146c Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sat, 6 Sep 2025 08:01:58 +0800 Subject: [PATCH 75/87] exit --- ghcide/src/Development/IDE/LSP/LanguageServer.hs | 8 +++++++- hls-graph/test/DatabaseSpec.hs | 6 +----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/ghcide/src/Development/IDE/LSP/LanguageServer.hs b/ghcide/src/Development/IDE/LSP/LanguageServer.hs index 8948d719d1..bff90cb66f 100644 --- a/ghcide/src/Development/IDE/LSP/LanguageServer.hs +++ b/ghcide/src/Development/IDE/LSP/LanguageServer.hs @@ -216,7 +216,7 @@ setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar requestReactorShutdown = do k <- tryPutMVar reactorStopSignal () logWith recorder Info $ LogReactorShutdownRequested k - let timeOutSeconds = 2 + let timeOutSeconds = 3 timeout (timeOutSeconds * 1_000_000) (waitBarrier reactorConfirmBarrier) >>= \case Just () -> pure () -- If we don't get confirmation within 2 seconds, we log a warning and shutdown anyway. @@ -250,6 +250,7 @@ setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar [ userHandlers , cancelHandler cancelRequest , shutdownHandler recorder requestReactorShutdown + , exitHandler recorder exit ] -- Cancel requests are special since they need to be handled -- out of order to be useful. Existing handlers are run afterwards. 
@@ -382,6 +383,11 @@ shutdownHandler _recorder requestReactorShutdown = LSP.requestHandler SMethod_Sh liftIO requestReactorShutdown resp $ Right Null +exitHandler :: Recorder (WithPriority Log) -> IO () -> LSP.Handlers (ServerM c) +exitHandler _recorder exit = LSP.notificationHandler SMethod_Exit $ \_ -> do + -- stop the reactor to free up the hiedb connection and shut down shake + liftIO exit + modifyOptions :: LSP.Options -> LSP.Options modifyOptions x = x{ LSP.optTextDocumentSync = Just $ tweakTDS origTDS } diff --git a/hls-graph/test/DatabaseSpec.hs b/hls-graph/test/DatabaseSpec.hs index 64ace32ce5..427dd2ceea 100644 --- a/hls-graph/test/DatabaseSpec.hs +++ b/hls-graph/test/DatabaseSpec.hs @@ -2,13 +2,9 @@ module DatabaseSpec where -import Control.Exception (Exception (..), - SomeException, - evaluate, throw) -import Control.Monad (join) +import Control.Exception (SomeException, throw) import Control.Monad.IO.Class (MonadIO (..)) import Control.Monad.Trans.Cont (evalContT) -import Debug.Trace (traceM) import Development.IDE.Graph (newKey, shakeOptions) import Development.IDE.Graph.Database (shakeNewDatabase, shakeRunDatabase) From 9b952c849cfc9540b4a574c81c0236a9db4d650b Mon Sep 17 00:00:00 2001 From: Lin Jian Date: Sat, 6 Sep 2025 11:45:44 +0800 Subject: [PATCH 76/87] Implement signature help (#4626) --- .github/workflows/test.yml | 4 + CODEOWNERS | 1 + docs/configuration.md | 2 +- docs/features.md | 8 +- docs/support/plugin-support.md | 1 + ghcide/src/Development/IDE/Core/Actions.hs | 2 +- ghcide/src/Development/IDE/Core/RuleTypes.hs | 10 +- ghcide/src/Development/IDE/Core/Rules.hs | 2 +- .../Development/IDE/GHC/Compat/Outputable.hs | 12 +- ghcide/src/Development/IDE/GHC/Util.hs | 11 +- .../src/Development/IDE/Plugin/Completions.hs | 6 +- ghcide/src/Development/IDE/Spans/AtPoint.hs | 2 +- ghcide/src/Development/IDE/Spans/Common.hs | 3 + .../Development/IDE/Spans/Documentation.hs | 25 +- haskell-language-server.cabal | 54 ++ hls-plugin-api/src/Ide/Plugin/Config.hs | 1 + hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs | 2 + hls-plugin-api/src/Ide/Types.hs | 11 +- .../src/Ide/Plugin/SignatureHelp.hs | 345 ++++++++++++ .../hls-signature-help-plugin/test/Main.hs | 515 ++++++++++++++++++ src/HlsPlugins.hs | 8 +- .../schema/ghc910/default-config.golden.json | 3 + .../vscode-extension-schema.golden.json | 6 + .../schema/ghc912/default-config.golden.json | 3 + .../vscode-extension-schema.golden.json | 6 + .../schema/ghc96/default-config.golden.json | 3 + .../ghc96/vscode-extension-schema.golden.json | 6 + .../schema/ghc98/default-config.golden.json | 3 + .../ghc98/vscode-extension-schema.golden.json | 6 + 29 files changed, 1036 insertions(+), 25 deletions(-) create mode 100644 plugins/hls-signature-help-plugin/src/Ide/Plugin/SignatureHelp.hs create mode 100644 plugins/hls-signature-help-plugin/test/Main.hs diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 984758a310..b2870d3076 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -261,6 +261,10 @@ jobs: name: Compile the plugin-tutorial run: cabal build plugin-tutorial + - if: matrix.test + name: Test hls-signature-help-plugin test suite + run: cabal test hls-signature-help-plugin-tests || cabal test hls-signature-help-plugin-tests + test_post_job: if: always() runs-on: ubuntu-latest diff --git a/CODEOWNERS b/CODEOWNERS index 820661ceeb..8d54022dc5 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -37,6 +37,7 @@ /plugins/hls-rename-plugin /plugins/hls-retrie-plugin @wz1000 
/plugins/hls-semantic-tokens-plugin @soulomoon +/plugins/hls-signature-help-plugin @jian-lin /plugins/hls-splice-plugin @konn /plugins/hls-stan-plugin @0rphee /plugins/hls-stylish-haskell-plugin @michaelpj diff --git a/docs/configuration.md b/docs/configuration.md index 2b0eb55e1e..66422e5677 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -52,7 +52,7 @@ Here is a list of the additional settings currently supported by `haskell-langua Plugins have a generic config to control their behaviour. The schema of such config is: - `haskell.plugin.${pluginName}.globalOn`: usually with default true. Whether the plugin is enabled at runtime or it is not. That is the option you might use if you want to disable completely a plugin. - - Actual plugin names are: `ghcide-code-actions-fill-holes`, `ghcide-completions`, `ghcide-hover-and-symbols`, `ghcide-type-lenses`, `ghcide-code-actions-type-signatures`, `ghcide-code-actions-bindings`, `ghcide-code-actions-imports-exports`, `eval`, `moduleName`, `pragmas`, `importLens`, `class`, `hlint`, `retrie`, `rename`, `splice`, `stan`. + - Actual plugin names are: `ghcide-code-actions-fill-holes`, `ghcide-completions`, `ghcide-hover-and-symbols`, `ghcide-type-lenses`, `ghcide-code-actions-type-signatures`, `ghcide-code-actions-bindings`, `ghcide-code-actions-imports-exports`, `eval`, `moduleName`, `pragmas`, `importLens`, `class`, `hlint`, `retrie`, `rename`, `splice`, `stan`, `signatureHelp`. - So to disable the import lens with an explicit list of module definitions you could set `haskell.plugin.importLens.globalOn: false` - `haskell.plugin.${pluginName}.${lspCapability}On`: usually with default true. Whether a concrete plugin capability is enabled. - Capabilities are the different ways a lsp server can interact with the editor. The current available capabilities of the server are: `callHierarchy`, `codeActions`, `codeLens`, `diagnostics`, `hover`, `symbols`, `completion`, `rename`. diff --git a/docs/features.md b/docs/features.md index 1eab0054b4..2f34f501cc 100644 --- a/docs/features.md +++ b/docs/features.md @@ -7,6 +7,7 @@ Many of these are standard LSP features, but a lot of special features are provi | --------------------------------------------------- | ------------------------------------------------------------------------------------------------- | | [Diagnostics](#diagnostics) | `textDocument/publishDiagnostics` | | [Hovers](#hovers) | `textDocument/hover` | +| [Signature help](#signature-help) | `textDocument/signatureHelp` | | [Jump to definition](#jump-to-definition) | `textDocument/definition` | | [Jump to type definition](#jump-to-type-definition) | `textDocument/typeDefinition` | | [Find references](#find-references) | `textDocument/references` | @@ -63,6 +64,12 @@ Provided by: `hls-explicit-fixity-plugin` Provides fixity information. +## Signature help + +Provided by: `hls-signature-help-plugin` + +Shows and highlights the function signature, the function documentation and the arguments documentation when the cursor is at a function argument. + ## Jump to definition Provided by: `ghcide` @@ -445,7 +452,6 @@ Contributions welcome! 
| Feature | Status | [LSP method](./what-is-hls.md#lsp-terminology) | | ---------------------- | ----------------- | ---------------------------------------------- | -| Signature help | Unimplemented | `textDocument/signatureHelp` | | Jump to declaration | Unclear if useful | `textDocument/declaration` | | Jump to implementation | Unclear if useful | `textDocument/implementation` | | Linked editing | Unimplemented | `textDocument/linkedEditingRange` | diff --git a/docs/support/plugin-support.md b/docs/support/plugin-support.md index 4263f0d035..724ca99da0 100644 --- a/docs/support/plugin-support.md +++ b/docs/support/plugin-support.md @@ -51,6 +51,7 @@ For example, a plugin to provide a formatter which has itself been abandoned has | `hls-class-plugin` | 2 | | | `hls-change-type-signature-plugin` | 2 | | | `hls-eval-plugin` | 2 | | +| `hls-signature-help-plugin` | 2 | | | `hls-explicit-fixity-plugin` | 2 | | | `hls-explicit-record-fields-plugin` | 2 | | | `hls-fourmolu-plugin` | 2 | | diff --git a/ghcide/src/Development/IDE/Core/Actions.hs b/ghcide/src/Development/IDE/Core/Actions.hs index 61614cb0ca..f8d9d4b2b2 100644 --- a/ghcide/src/Development/IDE/Core/Actions.hs +++ b/ghcide/src/Development/IDE/Core/Actions.hs @@ -63,7 +63,7 @@ getAtPoint file pos = runMaybeT $ do (hf, mapping) <- useWithStaleFastMT GetHieAst file env <- hscEnv . fst <$> useWithStaleFastMT GhcSession file - dkMap <- lift $ maybe (DKMap mempty mempty) fst <$> runMaybeT (useWithStaleFastMT GetDocMap file) + dkMap <- lift $ maybe (DKMap mempty mempty mempty) fst <$> runMaybeT (useWithStaleFastMT GetDocMap file) !pos' <- MaybeT (return $ fromCurrentPosition mapping pos) MaybeT $ liftIO $ fmap (first (toCurrentRange mapping =<<)) <$> AtPoint.atPoint opts hf dkMap env pos' diff --git a/ghcide/src/Development/IDE/Core/RuleTypes.hs b/ghcide/src/Development/IDE/Core/RuleTypes.hs index a13e6de14c..8798068b45 100644 --- a/ghcide/src/Development/IDE/Core/RuleTypes.hs +++ b/ghcide/src/Development/IDE/Core/RuleTypes.hs @@ -253,9 +253,15 @@ type instance RuleResult GetHieAst = HieAstResult -- | A IntervalMap telling us what is in scope at each point type instance RuleResult GetBindings = Bindings -data DocAndTyThingMap = DKMap {getDocMap :: !DocMap, getTyThingMap :: !TyThingMap} +data DocAndTyThingMap = DKMap + { getDocMap :: !DocMap + -- ^ Docs for declarations: functions, data types, instances, methods, etc + , getTyThingMap :: !TyThingMap + , getArgDocMap :: !ArgDocMap + -- ^ Docs for arguments, e.g., function arguments and method arguments + } instance NFData DocAndTyThingMap where - rnf (DKMap a b) = rwhnf a `seq` rwhnf b + rnf (DKMap a b c) = rwhnf a `seq` rwhnf b `seq` rwhnf c instance Show DocAndTyThingMap where show = const "docmap" diff --git a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs index c123c9d4a8..964d6d379b 100644 --- a/ghcide/src/Development/IDE/Core/Rules.hs +++ b/ghcide/src/Development/IDE/Core/Rules.hs @@ -582,7 +582,7 @@ getDocMapRule recorder = -- | Persistent rule to ensure that hover doesn't block on startup persistentDocMapRule :: Rules () -persistentDocMapRule = addPersistentRule GetDocMap $ \_ -> pure $ Just (DKMap mempty mempty, idDelta, Nothing) +persistentDocMapRule = addPersistentRule GetDocMap $ \_ -> pure $ Just (DKMap mempty mempty mempty, idDelta, Nothing) readHieFileForSrcFromDisk :: Recorder (WithPriority Log) -> NormalizedFilePath -> MaybeT IdeAction Compat.HieFile readHieFileForSrcFromDisk recorder file = do diff --git 
a/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs b/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs index ccec23c9c3..8414a7c8c3 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs @@ -9,6 +9,7 @@ module Development.IDE.GHC.Compat.Outputable ( ppr, pprPanic, text, vcat, (<+>), ($$), empty, hang, nest, punctuate, printSDocQualifiedUnsafe, printWithoutUniques, + printWithoutUniquesOneLine, mkPrintUnqualifiedDefault, PrintUnqualified, defaultUserStyle, @@ -27,6 +28,7 @@ module Development.IDE.GHC.Compat.Outputable ( pprMsgEnvelopeBagWithLoc, Error.getMessages, renderWithContext, + showSDocOneLine, defaultSDocContext, errMsgDiagnostic, unDecorated, @@ -76,8 +78,14 @@ type PrintUnqualified = NamePprCtx -- -- It print with a user-friendly style like: `a_a4ME` as `a`. printWithoutUniques :: Outputable a => a -> String -printWithoutUniques = - renderWithContext (defaultSDocContext +printWithoutUniques = printWithoutUniques' renderWithContext + +printWithoutUniquesOneLine :: Outputable a => a -> String +printWithoutUniquesOneLine = printWithoutUniques' showSDocOneLine + +printWithoutUniques' :: Outputable a => (SDocContext -> SDoc -> String) -> a -> String +printWithoutUniques' showSDoc = + showSDoc (defaultSDocContext { sdocStyle = defaultUserStyle , sdocSuppressUniques = True diff --git a/ghcide/src/Development/IDE/GHC/Util.hs b/ghcide/src/Development/IDE/GHC/Util.hs index fb051bda5a..9f1303c7cf 100644 --- a/ghcide/src/Development/IDE/GHC/Util.hs +++ b/ghcide/src/Development/IDE/GHC/Util.hs @@ -27,6 +27,7 @@ module Development.IDE.GHC.Util( dontWriteHieFiles, disableWarningsAsErrors, printOutputable, + printOutputableOneLine, getExtensions, stripOccNamePrefix, ) where @@ -264,11 +265,17 @@ ioe_dupHandlesNotCompatible h = -- 1. print with a user-friendly style: `a_a4ME` as `a`. -- 2. unescape escape sequences of printable unicode characters within a pair of double quotes printOutputable :: Outputable a => a -> T.Text -printOutputable = +printOutputable = printOutputable' printWithoutUniques + +printOutputableOneLine :: Outputable a => a -> T.Text +printOutputableOneLine = printOutputable' printWithoutUniquesOneLine + +printOutputable' :: Outputable a => (a -> String) -> a -> T.Text +printOutputable' print = -- IfaceTyLit from GHC.Iface.Type implements Outputable with 'show'. -- Showing a String escapes non-ascii printable characters. We unescape it here. -- More discussion at https://github.com/haskell/haskell-language-server/issues/3115. - unescape . T.pack . printWithoutUniques + unescape . T.pack . 
print {-# INLINE printOutputable #-} getExtensions :: ParsedModule -> [Extension] diff --git a/ghcide/src/Development/IDE/Plugin/Completions.hs b/ghcide/src/Development/IDE/Plugin/Completions.hs index d92bf1da85..7278b8a3e1 100644 --- a/ghcide/src/Development/IDE/Plugin/Completions.hs +++ b/ghcide/src/Development/IDE/Plugin/Completions.hs @@ -132,11 +132,11 @@ resolveCompletion ide _pid comp@CompletionItem{_detail,_documentation,_data_} ur name <- liftIO $ lookupNameCache nc mod occ mdkm <- liftIO $ runIdeAction "CompletionResolve.GetDocMap" (shakeExtras ide) $ useWithStaleFast GetDocMap file let (dm,km) = case mdkm of - Just (DKMap docMap tyThingMap, _) -> (docMap,tyThingMap) - Nothing -> (mempty, mempty) + Just (DKMap docMap tyThingMap _argDocMap, _) -> (docMap,tyThingMap) + Nothing -> (mempty, mempty) doc <- case lookupNameEnv dm name of Just doc -> pure $ spanDocToMarkdown doc - Nothing -> liftIO $ spanDocToMarkdown <$> getDocumentationTryGhc (hscEnv sess) name + Nothing -> liftIO $ spanDocToMarkdown . fst <$> getDocumentationTryGhc (hscEnv sess) name typ <- case lookupNameEnv km name of _ | not needType -> pure Nothing Just ty -> pure (safeTyThingType ty) diff --git a/ghcide/src/Development/IDE/Spans/AtPoint.hs b/ghcide/src/Development/IDE/Spans/AtPoint.hs index 50df0f5ba5..ded1781f7f 100644 --- a/ghcide/src/Development/IDE/Spans/AtPoint.hs +++ b/ghcide/src/Development/IDE/Spans/AtPoint.hs @@ -256,7 +256,7 @@ atPoint -> HscEnv -> Position -> IO (Maybe (Maybe Range, [T.Text])) -atPoint IdeOptions{} (HAR _ (hf :: HieASTs a) rf _ (kind :: HieKind hietype)) (DKMap dm km) env pos = +atPoint IdeOptions{} (HAR _ (hf :: HieASTs a) rf _ (kind :: HieKind hietype)) (DKMap dm km _am) env pos = listToMaybe <$> sequence (pointCommand hf pos hoverInfo) where -- Hover info for values/data diff --git a/ghcide/src/Development/IDE/Spans/Common.hs b/ghcide/src/Development/IDE/Spans/Common.hs index 996e55ef1a..90d77b71fb 100644 --- a/ghcide/src/Development/IDE/Spans/Common.hs +++ b/ghcide/src/Development/IDE/Spans/Common.hs @@ -13,6 +13,7 @@ module Development.IDE.Spans.Common ( , spanDocToMarkdownForTest , DocMap , TyThingMap +, ArgDocMap , srcSpanToMdLink ) where @@ -29,6 +30,7 @@ import GHC.Generics import System.FilePath import Control.Lens +import Data.IntMap (IntMap) import Development.IDE.GHC.Compat import Development.IDE.GHC.Orphans () import qualified Language.LSP.Protocol.Lens as JL @@ -36,6 +38,7 @@ import Language.LSP.Protocol.Types type DocMap = NameEnv SpanDoc type TyThingMap = NameEnv TyThing +type ArgDocMap = NameEnv (IntMap SpanDoc) -- | Shows IEWrappedName, without any modifier, qualifier or unique identifier. 
unqualIEWrapName :: IEWrappedName GhcPs -> T.Text diff --git a/ghcide/src/Development/IDE/Spans/Documentation.hs b/ghcide/src/Development/IDE/Spans/Documentation.hs index dcf7778de3..a4b6242315 100644 --- a/ghcide/src/Development/IDE/Spans/Documentation.hs +++ b/ghcide/src/Development/IDE/Spans/Documentation.hs @@ -16,6 +16,7 @@ import Control.Monad.Extra (findM) import Control.Monad.IO.Class import Data.Either import Data.Foldable +import Data.IntMap (IntMap) import Data.List.Extra import qualified Data.Map as M import Data.Maybe @@ -42,21 +43,27 @@ mkDocMap -> IO DocAndTyThingMap mkDocMap env rm this_mod = do - (Just Docs{docs_decls = UniqMap this_docs}) <- extractDocs (hsc_dflags env) this_mod + (Just Docs{docs_decls = UniqMap this_docs, docs_args = UniqMap this_arg_docs}) <- extractDocs (hsc_dflags env) this_mod d <- foldrM getDocs (fmap (\(_, x) -> (map hsDocString x) `SpanDocString` SpanDocUris Nothing Nothing) this_docs) names k <- foldrM getType (tcg_type_env this_mod) names - pure $ DKMap d k + a <- foldrM getArgDocs (fmap (\(_, m) -> fmap (\x -> [hsDocString x] `SpanDocString` SpanDocUris Nothing Nothing) m) this_arg_docs) names + pure $ DKMap d k a where getDocs n nameMap | maybe True (mod ==) $ nameModule_maybe n = pure nameMap -- we already have the docs in this_docs, or they do not exist | otherwise = do - doc <- getDocumentationTryGhc env n + (doc, _argDoc) <- getDocumentationTryGhc env n pure $ extendNameEnv nameMap n doc getType n nameMap | Nothing <- lookupNameEnv nameMap n = do kind <- lookupKind env n pure $ maybe nameMap (extendNameEnv nameMap n) kind | otherwise = pure nameMap + getArgDocs n nameMap + | maybe True (mod ==) $ nameModule_maybe n = pure nameMap + | otherwise = do + (_doc, argDoc) <- getDocumentationTryGhc env n + pure $ extendNameEnv nameMap n argDoc names = rights $ S.toList idents idents = M.keysSet rm mod = tcg_mod this_mod @@ -65,23 +72,23 @@ lookupKind :: HscEnv -> Name -> IO (Maybe TyThing) lookupKind env = fmap (fromRight Nothing) . catchSrcErrors (hsc_dflags env) "span" . lookupName env -getDocumentationTryGhc :: HscEnv -> Name -> IO SpanDoc +getDocumentationTryGhc :: HscEnv -> Name -> IO (SpanDoc, IntMap SpanDoc) getDocumentationTryGhc env n = - (fromMaybe emptySpanDoc . listToMaybe <$> getDocumentationsTryGhc env [n]) - `catch` (\(_ :: IOEnvFailure) -> pure emptySpanDoc) + (fromMaybe (emptySpanDoc, mempty) . 
listToMaybe <$> getDocumentationsTryGhc env [n]) + `catch` (\(_ :: IOEnvFailure) -> pure (emptySpanDoc, mempty)) -getDocumentationsTryGhc :: HscEnv -> [Name] -> IO [SpanDoc] +getDocumentationsTryGhc :: HscEnv -> [Name] -> IO [(SpanDoc, IntMap SpanDoc)] getDocumentationsTryGhc env names = do resOr <- catchSrcErrors (hsc_dflags env) "docs" $ getDocsBatch env names case resOr of Left _ -> return [] Right res -> zipWithM unwrap res names where - unwrap (Right (Just docs, _)) n = SpanDocString (map hsDocString docs) <$> getUris n + unwrap (Right (Just docs, argDocs)) n = (\uris -> (SpanDocString (map hsDocString docs) uris, fmap (\x -> SpanDocString [hsDocString x] uris) argDocs)) <$> getUris n unwrap _ n = mkSpanDocText n mkSpanDocText name = - SpanDocText [] <$> getUris name + (\uris -> (SpanDocText [] uris, mempty)) <$> getUris name -- Get the uris to the documentation and source html pages if they exist getUris name = do diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index 91adbcbe37..c30eebb8af 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -839,6 +839,59 @@ test-suite hls-stan-plugin-tests default-extensions: OverloadedStrings +----------------------------- +-- signature help plugin +----------------------------- + +flag signatureHelp + description: Enable signature help plugin + default: True + manual: True + +common signatureHelp + if flag(signatureHelp) + build-depends: haskell-language-server:hls-signature-help-plugin + cpp-options: -Dhls_signatureHelp + +library hls-signature-help-plugin + import: defaults, pedantic, warnings + if !flag(signatureHelp) + buildable: False + exposed-modules: Ide.Plugin.SignatureHelp + hs-source-dirs: plugins/hls-signature-help-plugin/src + default-extensions: + DerivingStrategies + LambdaCase + OverloadedStrings + build-depends: + , containers + , ghc + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lsp-types + , text + , transformers + + +test-suite hls-signature-help-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(signatureHelp) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-signature-help-plugin/test + main-is: Main.hs + build-depends: + , ghcide + , haskell-language-server:hls-signature-help-plugin + , hls-test-utils == 2.11.0.0 + , lens + , lsp-types + , string-interpolate + , text + default-extensions: + DerivingStrategies + OverloadedStrings + ----------------------------- -- module name plugin ----------------------------- @@ -1858,6 +1911,7 @@ library , retrie , hlint , stan + , signatureHelp , moduleName , pragmas , splice diff --git a/hls-plugin-api/src/Ide/Plugin/Config.hs b/hls-plugin-api/src/Ide/Plugin/Config.hs index 4fee92c309..ecaf5f5d41 100644 --- a/hls-plugin-api/src/Ide/Plugin/Config.hs +++ b/hls-plugin-api/src/Ide/Plugin/Config.hs @@ -72,6 +72,7 @@ parsePluginConfig def = A.withObject "PluginConfig" $ \o -> PluginConfig <*> o .:? "diagnosticsOn" .!= plcDiagnosticsOn def -- AZ <*> o .:? "hoverOn" .!= plcHoverOn def <*> o .:? "symbolsOn" .!= plcSymbolsOn def + <*> o .:? "signatureHelpOn" .!= plcSignatureHelpOn def <*> o .:? "completionOn" .!= plcCompletionOn def <*> o .:? "renameOn" .!= plcRenameOn def <*> o .:? 
"selectionRangeOn" .!= plcSelectionRangeOn def diff --git a/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs b/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs index a7350ab344..f352cc179d 100644 --- a/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs +++ b/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs @@ -104,6 +104,7 @@ pluginsToDefaultConfig IdePlugins {..} = SMethod_TextDocumentRename -> ["renameOn" A..= plcRenameOn] SMethod_TextDocumentHover -> ["hoverOn" A..= plcHoverOn] SMethod_TextDocumentDocumentSymbol -> ["symbolsOn" A..= plcSymbolsOn] + SMethod_TextDocumentSignatureHelp -> ["signatureHelpOn" A..= plcSignatureHelpOn] SMethod_TextDocumentCompletion -> ["completionOn" A..= plcCompletionOn] SMethod_TextDocumentPrepareCallHierarchy -> ["callHierarchyOn" A..= plcCallHierarchyOn] SMethod_TextDocumentSemanticTokensFull -> ["semanticTokensOn" A..= plcSemanticTokensOn] @@ -137,6 +138,7 @@ pluginsToVSCodeExtensionSchema IdePlugins {..} = A.object $ mconcat $ singlePlug SMethod_TextDocumentRename -> [toKey' "renameOn" A..= schemaEntry "rename" plcRenameOn] SMethod_TextDocumentHover -> [toKey' "hoverOn" A..= schemaEntry "hover" plcHoverOn] SMethod_TextDocumentDocumentSymbol -> [toKey' "symbolsOn" A..= schemaEntry "symbols" plcSymbolsOn] + SMethod_TextDocumentSignatureHelp -> [toKey' "signatureHelpOn" A..= schemaEntry "signature help" plcSignatureHelpOn] SMethod_TextDocumentCompletion -> [toKey' "completionOn" A..= schemaEntry "completions" plcCompletionOn] SMethod_TextDocumentPrepareCallHierarchy -> [toKey' "callHierarchyOn" A..= schemaEntry "call hierarchy" plcCallHierarchyOn] SMethod_TextDocumentSemanticTokensFull -> [toKey' "semanticTokensOn" A..= schemaEntry "semantic tokens" plcSemanticTokensOn] diff --git a/hls-plugin-api/src/Ide/Types.hs b/hls-plugin-api/src/Ide/Types.hs index 3a06656a77..314049b826 100644 --- a/hls-plugin-api/src/Ide/Types.hs +++ b/hls-plugin-api/src/Ide/Types.hs @@ -263,6 +263,7 @@ data PluginConfig = , plcDiagnosticsOn :: !Bool , plcHoverOn :: !Bool , plcSymbolsOn :: !Bool + , plcSignatureHelpOn :: !Bool , plcCompletionOn :: !Bool , plcRenameOn :: !Bool , plcSelectionRangeOn :: !Bool @@ -281,6 +282,7 @@ instance Default PluginConfig where , plcDiagnosticsOn = True , plcHoverOn = True , plcSymbolsOn = True + , plcSignatureHelpOn = True , plcCompletionOn = True , plcRenameOn = True , plcSelectionRangeOn = True @@ -290,7 +292,7 @@ instance Default PluginConfig where } instance ToJSON PluginConfig where - toJSON (PluginConfig g ch ca ih cl d h s c rn sr fr st cfg) = r + toJSON (PluginConfig g ch ca ih cl d h s sh c rn sr fr st cfg) = r where r = object [ "globalOn" .= g , "callHierarchyOn" .= ch @@ -300,6 +302,7 @@ instance ToJSON PluginConfig where , "diagnosticsOn" .= d , "hoverOn" .= h , "symbolsOn" .= s + , "signatureHelpOn" .= sh , "completionOn" .= c , "renameOn" .= rn , "selectionRangeOn" .= sr @@ -541,6 +544,9 @@ instance PluginMethod Request Method_TextDocumentHover where instance PluginMethod Request Method_TextDocumentDocumentSymbol where handlesRequest = pluginEnabledWithFeature plcSymbolsOn +instance PluginMethod Request Method_TextDocumentSignatureHelp where + handlesRequest = pluginEnabledWithFeature plcSignatureHelpOn + instance PluginMethod Request Method_CompletionItemResolve where -- See Note [Resolve in PluginHandlers] handlesRequest = pluginEnabledResolve plcCompletionOn @@ -764,6 +770,9 @@ instance PluginRequestMethod Method_TextDocumentDocumentSymbol where si = SymbolInformation name' (ds ^. L.kind) Nothing parent (ds ^. 
L.deprecated) loc in [si] <> children' +instance PluginRequestMethod Method_TextDocumentSignatureHelp where + combineResponses _ _ _ _ (x :| _) = x + instance PluginRequestMethod Method_CompletionItemResolve where -- A resolve request should only have one response. -- See Note [Resolve in PluginHandlers] diff --git a/plugins/hls-signature-help-plugin/src/Ide/Plugin/SignatureHelp.hs b/plugins/hls-signature-help-plugin/src/Ide/Plugin/SignatureHelp.hs new file mode 100644 index 0000000000..ada4d70872 --- /dev/null +++ b/plugins/hls-signature-help-plugin/src/Ide/Plugin/SignatureHelp.hs @@ -0,0 +1,345 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DisambiguateRecordFields #-} +{-# LANGUAGE GADTs #-} + +module Ide.Plugin.SignatureHelp (descriptor) where + +import Control.Arrow ((>>>)) +import Control.Monad.Trans.Except (ExceptT (ExceptT)) +import Data.Bifunctor (bimap) +import Data.Function ((&)) +import Data.IntMap (IntMap) +import qualified Data.IntMap as IntMap +import qualified Data.Map.Strict as M +import qualified Data.Set as S +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE (DocAndTyThingMap (DKMap), + GetDocMap (GetDocMap), + GetHieAst (GetHieAst), + HieAstResult (HAR, hieAst, hieKind), + HieKind (..), + IdeState (shakeExtras), + Pretty (pretty), + Recorder, WithPriority, + printOutputableOneLine, + useWithStaleFast) +import Development.IDE.Core.PluginUtils (runIdeActionE, + useWithStaleFastE) +import Development.IDE.Core.PositionMapping (fromCurrentPosition) +import Development.IDE.GHC.Compat (FastStringCompat, Name, + RealSrcSpan, + getSourceNodeIds, + isAnnotationInNodeInfo, + mkRealSrcLoc, + mkRealSrcSpan, ppr, + sourceNodeInfo) +import Development.IDE.GHC.Compat.Util (LexicalFastString (LexicalFastString)) +import Development.IDE.Spans.Common (ArgDocMap, DocMap, + SpanDoc (..), + SpanDocUris (SpanDocUris), + spanDocToMarkdown) +import GHC.Core.Map.Type (deBruijnize) +import GHC.Core.Type (FunTyFlag (FTF_T_T), + Type, dropForAlls, + splitFunTy_maybe) +import GHC.Data.Maybe (rightToMaybe) +import GHC.Iface.Ext.Types (ContextInfo (Use), + HieAST (nodeChildren, nodeSpan), + HieASTs (getAsts), + IdentifierDetails (identInfo, identType), + nodeType) +import GHC.Iface.Ext.Utils (smallestContainingSatisfying) +import GHC.Types.Name.Env (lookupNameEnv) +import GHC.Types.SrcLoc (isRealSubspanOf) +import Ide.Plugin.Error (getNormalizedFilePathE) +import Ide.Types (PluginDescriptor (pluginHandlers), + PluginId, + PluginMethodHandler, + defaultPluginDescriptor, + mkPluginHandler) +import Language.LSP.Protocol.Message (Method (Method_TextDocumentSignatureHelp), + SMethod (SMethod_TextDocumentSignatureHelp)) +import Language.LSP.Protocol.Types (MarkupContent (MarkupContent), + MarkupKind (MarkupKind_Markdown), + Null (Null), + ParameterInformation (ParameterInformation), + Position (Position), + SignatureHelp (..), + SignatureHelpContext (..), + SignatureHelpParams (SignatureHelpParams), + SignatureInformation (..), + TextDocumentIdentifier (TextDocumentIdentifier), + UInt, + type (|?) 
(InL, InR)) + +data Log + +instance Pretty Log where + pretty = \case {} + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor _recorder pluginId = + (defaultPluginDescriptor pluginId "Provides signature help of something callable") + { Ide.Types.pluginHandlers = mkPluginHandler SMethod_TextDocumentSignatureHelp signatureHelpProvider + } + +{- Note [Stale Results in Signature Help] + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Stale results work well when users are reading code. + +When we add support for writing code, such as automatically triggering signature +help when a space char is inserted, we probably have to use up-to-date results. +-} + +{- +Here is a brief description of the algorithm of finding relevant bits from HIE AST +1. let 'hsAppNode' = the smallest 'HsApp' AST node which contains the cursor postion + See 'extractInfoFromSmallestContainingFunctionApplicationAst' +2. let 'functionNode' = the left-most node of 'hsAppNode' + See 'getLeftMostNode' +3. try to get 'functionName' and 'functionTypes' from 'functionNode' + We get 'Nothing' when we cannot get that info + See 'getNodeNameAndTypes' +4. count 'parameterIndex' by traversing the 'hsAppNode' subtree from its root to the cursor position + We get 'Nothing' when either the cursor position is at 'functionNode' + or we encounter some AST node we do not yet know how to continue our traversal + See 'getParameterIndex' +-} +signatureHelpProvider :: PluginMethodHandler IdeState Method_TextDocumentSignatureHelp +signatureHelpProvider ideState _pluginId (SignatureHelpParams (TextDocumentIdentifier uri) position _mProgreeToken mSignatureHelpContext) = do + nfp <- getNormalizedFilePathE uri + results <- runIdeActionE "signatureHelp.ast" (shakeExtras ideState) $ do + -- see Note [Stale Results in Signature Help] + (HAR {hieAst, hieKind}, positionMapping) <- useWithStaleFastE GetHieAst nfp + case fromCurrentPosition positionMapping position of + Nothing -> pure [] + Just oldPosition -> do + pure $ + extractInfoFromSmallestContainingFunctionApplicationAst + oldPosition + hieAst + ( \span hieAst -> do + let functionNode = getLeftMostNode hieAst + (functionName, functionTypes) <- getNodeNameAndTypes hieKind functionNode + parameterIndex <- getParameterIndex span hieAst + Just (functionName, functionTypes, parameterIndex) + ) + (docMap, argDocMap) <- runIdeActionE "signatureHelp.docMap" (shakeExtras ideState) $ do + -- see Note [Stale Results in Signature Help] + mResult <- ExceptT $ Right <$> useWithStaleFast GetDocMap nfp + case mResult of + Just (DKMap docMap _tyThingMap argDocMap, _positionMapping) -> pure (docMap, argDocMap) + Nothing -> pure (mempty, mempty) + case results of + [(_functionName, [], _parameterIndex)] -> pure $ InR Null + [(functionName, functionTypes, parameterIndex)] -> + pure $ InL $ mkSignatureHelp mSignatureHelpContext docMap argDocMap (fromIntegral parameterIndex - 1) functionName functionTypes + _ -> pure $ InR Null + +mkSignatureHelp :: Maybe SignatureHelpContext -> DocMap -> ArgDocMap -> UInt -> Name -> [Type] -> SignatureHelp +mkSignatureHelp mSignatureHelpContext docMap argDocMap parameterIndex functionName functionTypes = + SignatureHelp + { _signatures = mkSignatureInformation docMap argDocMap parameterIndex functionName <$> functionTypes, + _activeSignature = activeSignature, + _activeParameter = Just $ InL parameterIndex + } + where + activeSignature = case mSignatureHelpContext of + Just + ( SignatureHelpContext + { _triggerKind, + _triggerCharacter, + 
_isRetrigger = True, + _activeSignatureHelp = Just (SignatureHelp _signatures oldActivateSignature _activeParameter) + } + ) -> oldActivateSignature + _ -> Just 0 + +mkSignatureInformation :: DocMap -> ArgDocMap -> UInt -> Name -> Type -> SignatureInformation +mkSignatureInformation docMap argDocMap parameterIndex functionName functionType = + let functionNameLabelPrefix = printOutputableOneLine (ppr functionName) <> " :: " + mFunctionDoc = case lookupNameEnv docMap functionName of + Nothing -> Nothing + Just spanDoc -> Just $ InR $ mkMarkdownDoc spanDoc + thisArgDocMap = case lookupNameEnv argDocMap functionName of + Nothing -> mempty + Just thisArgDocMap' -> thisArgDocMap' + in SignatureInformation + { -- Server-side line wrapping may be better since more context is available. + -- However, server-side line wrapping may make it harder to calculate + -- parameter ranges. In addition, some clients, such as vscode, ignore + -- server-side line wrapping and instead does client-side line wrapping. + -- So we choose not to do server-side line wrapping. + _label = functionNameLabelPrefix <> printOutputableOneLine functionType, + _documentation = mFunctionDoc, + _parameters = Just $ mkParameterInformations thisArgDocMap (fromIntegral $ T.length functionNameLabelPrefix) functionType, + _activeParameter = Just $ InL parameterIndex + } + +mkParameterInformations :: IntMap SpanDoc -> UInt -> Type -> [ParameterInformation] +mkParameterInformations thisArgDocMap offset functionType = + [ ParameterInformation (InR range) mParameterDoc + | (parameterIndex, range) <- zip [0 ..] (bimap (+ offset) (+ offset) <$> findParameterRanges functionType), + let mParameterDoc = case IntMap.lookup parameterIndex thisArgDocMap of + Nothing -> Nothing + Just spanDoc -> Just $ InR $ mkMarkdownDoc $ removeUris spanDoc + ] + where + -- we already show uris in the function doc, no need to duplicate them in the parameter doc + removeUris (SpanDocString docs _uris) = SpanDocString docs emptyUris + removeUris (SpanDocText docs _uris) = SpanDocText docs emptyUris + + emptyUris = SpanDocUris Nothing Nothing + +mkMarkdownDoc :: SpanDoc -> MarkupContent +mkMarkdownDoc = spanDocToMarkdown >>> T.unlines >>> MarkupContent MarkupKind_Markdown + +findParameterRanges :: Type -> [(UInt, UInt)] +findParameterRanges functionType = + let functionTypeString = printOutputableOneLine functionType + functionTypeStringLength = fromIntegral $ T.length functionTypeString + splitFunctionTypes = filter notTypeConstraint $ splitFunTysIgnoringForAll functionType + splitFunctionTypeStrings = printOutputableOneLine . fst <$> splitFunctionTypes + -- reverse to avoid matching "a" of "forall a" in "forall a. a -> a" + reversedRanges = + drop 1 $ -- do not need the range of the result (last) type + findParameterStringRanges + 0 + (T.reverse functionTypeString) + (T.reverse <$> reverse splitFunctionTypeStrings) + in reverse $ modifyRange functionTypeStringLength <$> reversedRanges + where + modifyRange functionTypeStringLength (start, end) = + (functionTypeStringLength - end, functionTypeStringLength - start) + +{- +The implemented method uses both structured type and unstructured type string. +It provides good enough results and is easier to implement than alternative +method 1 or 2. + +Alternative method 1: use only structured type +This method is hard to implement because we need to duplicate some logic of 'ppr' for 'Type'. 
+Some tricky cases are as follows: +- 'Eq a => Num b -> c' is shown as '(Eq a, Numb) => c' +- 'forall' can appear anywhere in a type when RankNTypes is enabled + f :: forall a. Maybe a -> forall b. (a, b) -> b +- '=>' can appear anywhere in a type + g :: forall a b. Eq a => a -> Num b => b -> b +- ppr the first parameter type of '(a -> b) -> a -> b' is 'a -> b' (no parentheses) +- 'forall' is not always shown + +Alternative method 2: use only unstructured type string +This method is hard to implement because we need to parse the type string. +Some tricky cases are as follows: +- h :: forall a (m :: Type -> Type). Monad m => a -> m a +-} +findParameterStringRanges :: UInt -> Text -> [Text] -> [(UInt, UInt)] +findParameterStringRanges _totalPrefixLength _functionTypeString [] = [] +findParameterStringRanges totalPrefixLength functionTypeString (parameterTypeString : restParameterTypeStrings) = + let (prefix, match) = T.breakOn parameterTypeString functionTypeString + prefixLength = fromIntegral $ T.length prefix + parameterTypeStringLength = fromIntegral $ T.length parameterTypeString + start = totalPrefixLength + prefixLength + in (start, start + parameterTypeStringLength) + : findParameterStringRanges + (totalPrefixLength + prefixLength + parameterTypeStringLength) + (T.drop (fromIntegral parameterTypeStringLength) match) + restParameterTypeStrings + +-- similar to 'splitFunTys' but +-- 1) the result (last) type is included and +-- 2) toplevel foralls are ignored +splitFunTysIgnoringForAll :: Type -> [(Type, Maybe FunTyFlag)] +splitFunTysIgnoringForAll ty = case ty & dropForAlls & splitFunTy_maybe of + Just (funTyFlag, _mult, parameterType, resultType) -> + (parameterType, Just funTyFlag) : splitFunTysIgnoringForAll resultType + Nothing -> [(ty, Nothing)] + +notTypeConstraint :: (Type, Maybe FunTyFlag) -> Bool +notTypeConstraint (_type, Just FTF_T_T) = True +notTypeConstraint (_type, Nothing) = True +notTypeConstraint _ = False + +extractInfoFromSmallestContainingFunctionApplicationAst :: + Position -> HieASTs a -> (RealSrcSpan -> HieAST a -> Maybe b) -> [b] +extractInfoFromSmallestContainingFunctionApplicationAst position hieAsts extractInfo = + M.elems $ flip M.mapMaybeWithKey (getAsts hieAsts) $ \hiePath hieAst -> + smallestContainingSatisfying (positionToSpan hiePath position) (nodeHasAnnotation ("HsApp", "HsExpr")) hieAst + >>= extractInfo (positionToSpan hiePath position) + where + positionToSpan hiePath position = + let loc = mkLoc hiePath position in mkRealSrcSpan loc loc + mkLoc (LexicalFastString hiePath) (Position line character) = + mkRealSrcLoc hiePath (fromIntegral line + 1) (fromIntegral character + 1) + +type Annotation = (FastStringCompat, FastStringCompat) + +nodeHasAnnotation :: Annotation -> HieAST a -> Bool +nodeHasAnnotation annotation hieAst = case sourceNodeInfo hieAst of + Nothing -> False + Just nodeInfo -> isAnnotationInNodeInfo annotation nodeInfo + +getLeftMostNode :: HieAST a -> HieAST a +getLeftMostNode thisNode = + case nodeChildren thisNode of + [] -> thisNode + leftChild : _ -> getLeftMostNode leftChild + +getNodeNameAndTypes :: HieKind a -> HieAST a -> Maybe (Name, [Type]) +getNodeNameAndTypes hieKind hieAst = + if nodeHasAnnotation ("HsVar", "HsExpr") hieAst + then case hieAst & getSourceNodeIds & M.filter isUse & M.assocs of + [(identifier, identifierDetails)] -> + case extractName identifier of + Nothing -> Nothing + Just name -> + let mTypeOfName = identType identifierDetails + typesOfNode = case sourceNodeInfo hieAst of + Nothing -> [] + Just 
nodeInfo -> nodeType nodeInfo + allTypes = case mTypeOfName of + Nothing -> typesOfNode + -- (the last?) one type of 'typesOfNode' may (always?) be the same as 'typeOfName' + -- To avoid generating two identical signature helps, we do a filtering here + -- This is similar to 'dropEnd1' in Development.IDE.Spans.AtPoint.atPoint + -- TODO perhaps extract a common function + Just typeOfName -> typeOfName : filter (isDifferentType typeOfName) typesOfNode + in Just (name, filterCoreTypes allTypes) + [] -> Nothing + _ -> Nothing -- seems impossible + else Nothing + where + extractName = rightToMaybe + + isDifferentType type1 type2 = case hieKind of + HieFresh -> deBruijnize type1 /= deBruijnize type2 + HieFromDisk {} -> type1 /= type2 + + filterCoreTypes types = case hieKind of + HieFresh -> types + -- ignore this case since this only happens before we finish startup + HieFromDisk {} -> [] + +isUse :: IdentifierDetails a -> Bool +isUse = identInfo >>> S.member Use + +-- Just 1 means the first parameter +getParameterIndex :: RealSrcSpan -> HieAST a -> Maybe Integer +getParameterIndex span hieAst + | nodeHasAnnotation ("HsApp", "HsExpr") hieAst = + case nodeChildren hieAst of + [leftChild, _] -> + if span `isRealSubspanOf` nodeSpan leftChild + then Nothing + else getParameterIndex span leftChild >>= \parameterIndex -> Just (parameterIndex + 1) + _ -> Nothing -- impossible + | nodeHasAnnotation ("HsAppType", "HsExpr") hieAst = + case nodeChildren hieAst of + [leftChild, _] -> getParameterIndex span leftChild + _ -> Nothing -- impossible + | otherwise = + case nodeChildren hieAst of + [] -> Just 0 -- the function is found + [child] -> getParameterIndex span child -- ignore irrelevant nodes + _ -> Nothing diff --git a/plugins/hls-signature-help-plugin/test/Main.hs b/plugins/hls-signature-help-plugin/test/Main.hs new file mode 100644 index 0000000000..4ac665e7d1 --- /dev/null +++ b/plugins/hls-signature-help-plugin/test/Main.hs @@ -0,0 +1,515 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE QuasiQuotes #-} + +import Control.Arrow ((>>>)) +import Control.Exception (throw) +import Control.Lens ((^.)) +import Data.Maybe (fromJust) +import Data.String.Interpolate (__i) +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE.Plugin.Completions.Types (PosPrefixInfo (PosPrefixInfo)) +import Ide.Plugin.SignatureHelp (descriptor) +import qualified Language.LSP.Protocol.Lens as L +import Test.Hls +import Test.Hls.FileSystem (VirtualFileTree, + directCradle, file, + mkVirtualFileTree, + text) + +main :: IO () +main = + defaultTestRunner $ + testGroup + "signatureHelp" + [ mkTest + "1 parameter" + [__i| + f :: Int -> Int + f = _ + x = f 1 + ^^^^^^^^ + |] + [ Nothing, + Nothing, + Nothing, + Nothing, + Nothing, + Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + mkTest + "2 parameters" + [__i| + f :: Int -> Int -> Int + f = _ + x = f 1 2 + ^ ^^^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation 
"f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 1))] (Just 0) (Just (InL 1)) + ], + mkTest + "3 parameters" + [__i| + f :: Int -> Int -> Int -> Int + f = _ + x = f 1 2 3 + ^ ^ ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing, ParameterInformation (InR (19, 22)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing, ParameterInformation (InR (19, 22)) Nothing]) (Just (InL 1))] (Just 0) (Just (InL 1)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing, ParameterInformation (InR (19, 22)) Nothing]) (Just (InL 2))] (Just 0) (Just (InL 2)) + ], + mkTest + "parentheses" + [__i| + f :: Int -> Int -> Int + f = _ + x = (f 1) 2 + ^^ ^^^^ + |] + [ Nothing, + Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 1))] (Just 0) (Just (InL 1)) + ], + mkTest + "newline" + [__i| + f :: Int -> Int -> Int + f = _ + x = + ( + ^ + f + ^ + 1 + ^ + ) + ^ + 2 + ^ + + ^ + |] + [ Nothing, + Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 1))] (Just 0) (Just (InL 1)), + Nothing + ], + mkTest + "nested" + [__i| + f :: Int -> Int -> Int + f = _ + g :: Int -> Int + g = _ + x = f (g 1) 2 + ^^^^ ^^^^ + |] + [ Nothing, + Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "g :: Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "g :: Int -> 
Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing]) (Just (InL 1))] (Just 0) (Just (InL 1)) + ], + mkTest + "higher-order function" + [__i| + f :: (Int -> Int) -> Int -> Int + f = _ + x = f (+ 1) 2 + ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: (Int -> Int) -> Int -> Int" Nothing (Just [ParameterInformation (InR (6, 16)) Nothing, ParameterInformation (InR (21, 24)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + mkTest + "type constraint" + [__i| + f :: (Num a) => a -> a -> a + f = _ + x = f 1 2 + ^ ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a. Num a => a -> a -> a" Nothing (Just [ParameterInformation (InR (24, 25)) Nothing, ParameterInformation (InR (29, 30)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> Integer -> Integer" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (16, 23)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a. Num a => a -> a -> a" Nothing (Just [ParameterInformation (InR (24, 25)) Nothing, ParameterInformation (InR (29, 30)) Nothing]) (Just (InL 1)), SignatureInformation "f :: Integer -> Integer -> Integer" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (16, 23)) Nothing]) (Just (InL 1))] (Just 0) (Just (InL 1)) + ], + mkTest + "type constraint with kind signatures" + [__i| + x :: IO Bool + x = pure True + ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "pure :: forall (f :: Type -> Type) a. Applicative f => a -> f a" (Just $ InR $ MarkupContent MarkupKind_Markdown "Lift a value") (Just [ParameterInformation (InR (55, 56)) Nothing]) (Just (InL 0)), SignatureInformation "pure :: Bool -> IO Bool" (Just $ InR $ MarkupContent MarkupKind_Markdown "Lift a value") (Just [ParameterInformation (InR (8, 12)) Nothing]) (Just (InL 0)), SignatureInformation "pure :: forall a. a -> IO a" (Just $ InR $ MarkupContent MarkupKind_Markdown "Lift a value") (Just [ParameterInformation (InR (18, 19)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + mkTest + "2 type constraints" + [__i| + f :: forall a. (Eq a, Num a) => a -> a -> a + f = _ + x = f True + ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a. 
(Eq a, Num a) => a -> a -> a" Nothing (Just [ParameterInformation (InR (32, 33)) Nothing, ParameterInformation (InR (37, 38)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Bool -> Bool -> Bool" Nothing (Just [ParameterInformation (InR (5, 9)) Nothing, ParameterInformation (InR (13, 17)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + mkTest + "dynamic function" + [__i| + f :: Int -> Int -> Int + f = _ + g :: Int -> Int -> Int + g = _ + x = (if _ then f else g) 1 2 + ^^ ^^^ ^ ^^^ ^ ^^^^^^^^ + |] + (replicate 18 Nothing), + mkTest + "very long type" + [__i| + f :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int + f = _ + x = f 1 + ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int" Nothing (Just [ParameterInformation (InR (5, 8)) Nothing, ParameterInformation (InR (12, 15)) Nothing, ParameterInformation (InR (19, 22)) Nothing, ParameterInformation (InR (26, 29)) Nothing, ParameterInformation (InR (33, 36)) Nothing, ParameterInformation (InR (40, 43)) Nothing, ParameterInformation (InR (47, 50)) Nothing, ParameterInformation (InR (54, 57)) Nothing, ParameterInformation (InR (61, 64)) Nothing, ParameterInformation (InR (68, 71)) Nothing, ParameterInformation (InR (75, 78)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + mkTest + "very long type with type constraint" + [__i| + f :: Num abcdefghijklmn => abcdefghijklmn -> abcdefghijklmn -> abcdefghijklmn -> abcdefghijklmn -> abcdefghijklmn + f = _ + x = f 1 + ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall abcdefghijklmn. Num abcdefghijklmn => abcdefghijklmn -> abcdefghijklmn -> abcdefghijklmn -> abcdefghijklmn -> abcdefghijklmn" Nothing (Just [ParameterInformation (InR (50, 64)) Nothing, ParameterInformation (InR (68, 82)) Nothing, ParameterInformation (InR (86, 100)) Nothing, ParameterInformation (InR (104, 118)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> Integer -> Integer -> Integer -> Integer" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (16, 23)) Nothing, ParameterInformation (InR (27, 34)) Nothing, ParameterInformation (InR (38, 45)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + -- TODO fix bug of wrong parameter range in the function type string + -- https://github.com/haskell/haskell-language-server/pull/4626#discussion_r2261133076 + mkTestExpectFail + "middle =>" + [__i| + f :: Eq a => a -> Num b => b -> b + f = _ + x = f 1 True + ^ ^ ^ + y = f True + ^ + z = f 1 + ^ + |] + ( BrokenIdeal + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. Eq a => a -> Num b => b -> b" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (39, 40)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> Num Bool => Bool -> Bool" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (28, 32)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. 
Eq a => a -> Num b => b -> b" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (39, 40)) Nothing]) (Just (InL 1)), SignatureInformation "f :: Integer -> Num Bool => Bool -> Bool" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (28, 32)) Nothing]) (Just (InL 1))] (Just 0) (Just (InL 1)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. Eq a => a -> Num b => b -> b" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (39, 40)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Bool -> Num Integer => Integer -> Integer" Nothing (Just [ParameterInformation (InR (5, 9)) Nothing, ParameterInformation (InR (28, 35)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. Eq a => a -> Num b => b -> b" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (39, 40)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> Num Integer => Integer -> Integer" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (31, 38)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ] + ) + ( BrokenCurrent + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. Eq a => a -> Num b => b -> b" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (39, 40)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> Num Bool => Bool -> Bool" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (28, 32)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. Eq a => a -> Num b => b -> b" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (39, 40)) Nothing]) (Just (InL 1)), SignatureInformation "f :: Integer -> Num Bool => Bool -> Bool" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (28, 32)) Nothing]) (Just (InL 1))] (Just 0) (Just (InL 1)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. Eq a => a -> Num b => b -> b" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (39, 40)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Bool -> Num Integer => Integer -> Integer" Nothing (Just [ParameterInformation (InR (5, 9)) Nothing, ParameterInformation (InR (28, 35)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. 
Eq a => a -> Num b => b -> b" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (39, 40)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> Num Integer => Integer -> Integer" Nothing (Just [ParameterInformation (InR (20, 27)) Nothing, ParameterInformation (InR (31, 38)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ] + ), + mkTest + "=> in parameter" + [__i| + f :: Eq a => a -> (Num b => b -> b) -> a + f = _ + x = f 1 + ^ ^ + y = f 1 negate + ^ ^ + |] + ( let typ = + if ghcVersion <= GHC98 + then "f :: Integer -> (Num Any => Any -> Any) -> Integer" + else "f :: Integer -> (Num (ZonkAny 0) => ZonkAny 0 -> ZonkAny 0) -> Integer" + range = if ghcVersion <= GHC98 then (17, 38) else (17, 58) + in [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. Eq a => a -> (Num b => b -> b) -> a" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (31, 46)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> (Num b => b -> b) -> Integer" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (17, 32)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. Eq a => a -> (Num b => b -> b) -> a" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (31, 46)) Nothing]) (Just (InL 0)), SignatureInformation typ Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR range) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. Eq a => a -> (Num b => b -> b) -> a" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (31, 46)) Nothing]) (Just (InL 1)), SignatureInformation typ Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR range) Nothing]) (Just (InL 1))] (Just 0) (Just (InL 1)) + ] + ), + mkTest + "RankNTypes(forall in middle)" + [__i| + f :: Maybe a -> forall b. (a, b) -> b + f = _ + x1 = f Nothing + ^ ^ + x2 = f (Just True) + ^ + x3 = f Nothing (1, True) + ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a. Maybe a -> forall b. (a, b) -> b" Nothing (Just [ParameterInformation (InR (15, 22)) Nothing, ParameterInformation (InR (36, 42)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Maybe a -> forall b. (a, b) -> b" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (26, 32)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a. Maybe a -> forall b. (a, b) -> b" Nothing (Just [ParameterInformation (InR (15, 22)) Nothing, ParameterInformation (InR (36, 42)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Maybe Bool -> forall b. (Bool, b) -> b" Nothing (Just [ParameterInformation (InR (5, 15)) Nothing, ParameterInformation (InR (29, 38)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a. Maybe a -> forall b. (a, b) -> b" Nothing (Just [ParameterInformation (InR (15, 22)) Nothing, ParameterInformation (InR (36, 42)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Maybe Integer -> forall b. 
(Integer, b) -> b" Nothing (Just [ParameterInformation (InR (5, 18)) Nothing, ParameterInformation (InR (32, 44)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + -- TODO fix bug of wrong parameter range in the function type string + -- https://github.com/haskell/haskell-language-server/pull/4626#discussion_r2261133076 + mkTestExpectFail + "RankNTypes(forall in middle), another" + [__i| + f :: l -> forall a. a -> a + f = _ + x = f 1 + ^ ^ + |] + ( BrokenIdeal + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall l. l -> forall a. a -> a" Nothing (Just [ParameterInformation (InR (15, 16)) Nothing, ParameterInformation (InR (30, 31)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> forall a. a -> a" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (26, 27)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ] + ) + ( BrokenCurrent + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall l. l -> forall a. a -> a" Nothing (Just [ParameterInformation (InR (25, 26)) Nothing, ParameterInformation (InR (30, 31)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> forall a. a -> a" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (26, 27)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ] + ), + -- TODO fix bug of wrong parameter range in the function type string + -- https://github.com/haskell/haskell-language-server/pull/4626#discussion_r2261133076 + mkTestExpectFail + "RankNTypes(forall in middle), again" + [__i| + f :: a -> forall a. a -> a + f = _ + x = f 1 + ^ ^ + |] + ( BrokenIdeal + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a. a -> forall a1. a1 -> a1" Nothing (Just [ParameterInformation (InR (15, 16)) Nothing, ParameterInformation (InR (31, 33)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> forall a. a -> a" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (26, 27)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ] + ) + ( BrokenCurrent + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a. a -> forall a1. a1 -> a1" Nothing (Just [ParameterInformation (InR (27, 28)) Nothing, ParameterInformation (InR (31, 32)) Nothing]) (Just (InL 0)), SignatureInformation "f :: Integer -> forall a. a -> a" Nothing (Just [ParameterInformation (InR (5, 12)) Nothing, ParameterInformation (InR (26, 27)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ] + ), + mkTest + "LinearTypes" + [__i| + {-\# LANGUAGE LinearTypes \#-} + f :: (a -> b) %1 -> a -> b + f = _ + x1 = f negate + ^ ^ + x2 = f _ 1 + ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. (a -> b) %1 -> a -> b" Nothing (Just [ParameterInformation (InR (18, 24)) Nothing, ParameterInformation (InR (32, 33)) Nothing]) (Just (InL 0)), SignatureInformation "f :: (Integer -> Integer) %1 -> Integer -> Integer" Nothing (Just [ParameterInformation (InR (6, 24)) Nothing, ParameterInformation (InR (32, 39)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)), + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b. 
(a -> b) %1 -> a -> b" Nothing (Just [ParameterInformation (InR (18, 24)) Nothing, ParameterInformation (InR (32, 33)) Nothing]) (Just (InL 0)), SignatureInformation "f :: (Integer -> b) %1 -> Integer -> b" Nothing (Just [ParameterInformation (InR (6, 18)) Nothing, ParameterInformation (InR (26, 33)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + mkTest + "function documentation" + [__i| + -- |The 'f' function does something to a bool value. + f :: Bool -> Bool + f = _ + x = f True + ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Bool -> Bool" (Just $ InR $ MarkupContent MarkupKind_Markdown "The `f` function does something to a bool value") (Just [ParameterInformation (InR (5, 9)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + mkTest + "function and parameters documentation" + [__i| + -- |Doc for function 'f'. + f :: + -- | The first 'Bool' parameter + Bool -> + -- | The second 'Int' parameter + Int -> + -- | The return value + Bool + f = _ + x = f True 1 + ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: Bool -> Int -> Bool" (Just $ InR $ MarkupContent MarkupKind_Markdown "Doc for function `f`") (Just [ParameterInformation (InR (5, 9)) (Just $ InR $ MarkupContent MarkupKind_Markdown "The first `Bool` parameter"), ParameterInformation (InR (13, 16)) (Just $ InR $ MarkupContent MarkupKind_Markdown "The second `Int` parameter")]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + mkTest + "imported function with no documentation" + [__i| + x = even 1 + ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "even :: forall a. Integral a => a -> Bool" (Just $ InR $ MarkupContent MarkupKind_Markdown "") (Just [ParameterInformation (InR (32, 33)) Nothing]) (Just (InL 0)), SignatureInformation "even :: Integer -> Bool" (Just $ InR $ MarkupContent MarkupKind_Markdown "") (Just [ParameterInformation (InR (8, 15)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + mkTest + "imported function with parameter documentation" + [__i| + import Language.Haskell.TH.Lib (mkBytes) + x = mkBytes _ + ^ ^ + |] + [ Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "mkBytes :: ForeignPtr Word8 -> Word -> Word -> Bytes" (Just $ InR $ MarkupContent MarkupKind_Markdown "Create a Bytes datatype representing raw bytes to be embedded into the") (Just [ParameterInformation (InR (11, 27)) (Just $ InR $ MarkupContent MarkupKind_Markdown "Pointer to the data"), ParameterInformation (InR (31, 35)) (Just $ InR $ MarkupContent MarkupKind_Markdown "Offset from the pointer"), ParameterInformation (InR (39, 43)) (Just $ InR $ MarkupContent MarkupKind_Markdown "Number of bytes")]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ], + mkTest + "TypeApplications" + [__i| + f :: a -> b -> c + f = _ + x = f @Int @_ 1 True + ^ ^ ^ ^ + |] + [ Nothing, + Nothing, + Nothing, + Just $ SimilarSignatureHelp $ SignatureHelp [SignatureInformation "f :: forall a b c. 
a -> b -> c" Nothing (Just [ParameterInformation (InR (19, 20)) Nothing, ParameterInformation (InR (24, 25)) Nothing]) (Just (InL 0))] (Just 0) (Just (InL 0)) + ] + ] + +mkTest :: TestName -> Text -> [Maybe SimilarSignatureHelp] -> TestTree +mkTest name sourceCode expectedSignatureHelps = + parameterisedCursorTest + name + sourceCode + expectedSignatureHelps + getSignatureHelpFromSession + +mkTestExpectFail :: + TestName -> + Text -> + ExpectBroken 'Ideal [Maybe SimilarSignatureHelp] -> + ExpectBroken 'Current [Maybe SimilarSignatureHelp] -> + TestTree +mkTestExpectFail name sourceCode _idealSignatureHelps = unCurrent >>> mkTest name sourceCode + +getSignatureHelpFromSession :: Text -> PosPrefixInfo -> IO (Maybe SimilarSignatureHelp) +getSignatureHelpFromSession sourceCode (PosPrefixInfo _ _ _ position) = + let fileName = "A.hs" + plugin = mkPluginTestDescriptor descriptor "signatureHelp" + virtualFileTree = mkVirtualFileTreeWithSingleFile fileName sourceCode + in runSessionWithServerInTmpDir def plugin virtualFileTree $ do + doc <- openDoc fileName "haskell" + (fmap . fmap) SimilarSignatureHelp (getSignatureHelp doc position) + +mkVirtualFileTreeWithSingleFile :: FilePath -> Text -> VirtualFileTree +mkVirtualFileTreeWithSingleFile fileName sourceCode = + let testDataDir = "/not-used-dir" + in mkVirtualFileTree + testDataDir + [ directCradle [T.pack fileName], + file fileName (text sourceCode) + ] + +newtype SimilarSignatureHelp = SimilarSignatureHelp SignatureHelp + deriving newtype (Show) + +-- custom Eq to ignore some details, such as added doc string +-- not symmetry +instance Eq SimilarSignatureHelp where + SimilarSignatureHelp + actualSignatureHelp@( SignatureHelp + actualSignatureInformations + actualActiveSignature + actualActiveParameter + ) + == SimilarSignatureHelp + expectedSignatureHelp@( SignatureHelp + expectedSignatureInformations + expectedActiveSignature + expectedActiveParameter + ) + | actualSignatureHelp == expectedSignatureHelp = True + | actualActiveSignature == expectedActiveSignature + && actualActiveParameter == expectedActiveParameter = + actualSignatureInformations ~= expectedSignatureInformations + | otherwise = False + +class IsSimilar a where + (~=) :: a -> a -> Bool + +instance IsSimilar SignatureInformation where + actualSignatureInformation@( SignatureInformation + actualLabel + actualDocumentation + actualParameters + actualActiveParameter + ) + ~= expectedSignatureInformation@( SignatureInformation + expectedLabel + expectedDocumentation + expectedParameters + expectedActiveParameter + ) + | actualSignatureInformation == expectedSignatureInformation = True + | actualLabel == expectedLabel && actualActiveParameter == expectedActiveParameter = + actualDocumentation ~= expectedDocumentation + && actualParameters ~= expectedParameters + | otherwise = False + +instance IsSimilar ParameterInformation where + actualParameterInformation@(ParameterInformation actualLabel actualDocumentation) + ~= expectedParameterInformation@(ParameterInformation expectedLabel expectedDocumentation) + | actualParameterInformation == expectedParameterInformation = True + | actualLabel == expectedLabel = actualDocumentation ~= expectedDocumentation + | otherwise = False + +instance IsSimilar MarkupContent where + actualMarkupContent@(MarkupContent actualKind actualText) + ~= expectedMarkupContent@(MarkupContent expectedKind expectedText) + | actualMarkupContent == expectedMarkupContent = True + | actualKind == expectedKind = actualText ~= expectedText + | otherwise = False + 
+instance IsSimilar Text where + actualText ~= expectedText = expectedText `T.isInfixOf` actualText + +instance (IsSimilar a) => IsSimilar [a] where + [] ~= [] = True + (x : xs) ~= (y : ys) = x ~= y && xs ~= ys + _ ~= _ = False + +instance (IsSimilar a) => IsSimilar (Maybe a) where + Nothing ~= Nothing = True + Just x ~= Just y = x ~= y + _ ~= _ = False + +instance (IsSimilar a, IsSimilar b) => IsSimilar (a |? b) where + InL x ~= InL y = x ~= y + InR x ~= InR y = x ~= y + _ ~= _ = False + +-- TODO use the one from lsp-test when we have https://github.com/haskell/lsp/pull/621 + +-- | Returns the signature help at the specified position. +getSignatureHelp :: TextDocumentIdentifier -> Position -> Session (Maybe SignatureHelp) +getSignatureHelp doc pos = + let params = SignatureHelpParams doc pos Nothing Nothing + in nullToMaybe . getResponseResult <$> request SMethod_TextDocumentSignatureHelp params + where + getResponseResult rsp = + case rsp ^. L.result of + Right x -> x + Left err -> throw $ UnexpectedResponseError (fromJust $ rsp ^. L.id) err diff --git a/src/HlsPlugins.hs b/src/HlsPlugins.hs index 4c135fc48b..51fc196fdb 100644 --- a/src/HlsPlugins.hs +++ b/src/HlsPlugins.hs @@ -53,6 +53,10 @@ import qualified Ide.Plugin.Hlint as Hlint import qualified Ide.Plugin.Stan as Stan #endif +#if hls_signatureHelp +import qualified Ide.Plugin.SignatureHelp as SignatureHelp +#endif + #if hls_moduleName import qualified Ide.Plugin.ModuleName as ModuleName #endif @@ -214,6 +218,9 @@ idePlugins recorder = pluginDescToIdePlugins allPlugins #if hls_stan let pId = "stan" in Stan.descriptor (pluginRecorder pId) pId : #endif +#if hls_signatureHelp + let pId = "signatureHelp" in SignatureHelp.descriptor (pluginRecorder pId) pId: +#endif #if hls_splice Splice.descriptor "splice" : #endif @@ -249,4 +256,3 @@ idePlugins recorder = pluginDescToIdePlugins allPlugins let pId = "notes" in Notes.descriptor (pluginRecorder pId) pId : #endif GhcIde.descriptors (pluginRecorder "ghcide") - diff --git a/test/testdata/schema/ghc910/default-config.golden.json b/test/testdata/schema/ghc910/default-config.golden.json index 3b4e687ef9..81b63dc6e4 100644 --- a/test/testdata/schema/ghc910/default-config.golden.json +++ b/test/testdata/schema/ghc910/default-config.golden.json @@ -150,6 +150,9 @@ }, "globalOn": false }, + "signatureHelp": { + "globalOn": true + }, "stan": { "globalOn": false } diff --git a/test/testdata/schema/ghc910/vscode-extension-schema.golden.json b/test/testdata/schema/ghc910/vscode-extension-schema.golden.json index 4ca08f296c..ba79ee22c7 100644 --- a/test/testdata/schema/ghc910/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc910/vscode-extension-schema.golden.json @@ -1037,6 +1037,12 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.signatureHelp.globalOn": { + "default": true, + "description": "Enables signatureHelp plugin", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.stan.globalOn": { "default": false, "description": "Enables stan plugin", diff --git a/test/testdata/schema/ghc912/default-config.golden.json b/test/testdata/schema/ghc912/default-config.golden.json index 0dfbd39df2..598e3a4f2e 100644 --- a/test/testdata/schema/ghc912/default-config.golden.json +++ b/test/testdata/schema/ghc912/default-config.golden.json @@ -149,6 +149,9 @@ "variableToken": "variable" }, "globalOn": false + }, + "signatureHelp": { + "globalOn": true } }, "sessionLoading": "singleComponent" diff --git a/test/testdata/schema/ghc912/vscode-extension-schema.golden.json 
b/test/testdata/schema/ghc912/vscode-extension-schema.golden.json index 77d398438e..68f1b4f800 100644 --- a/test/testdata/schema/ghc912/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc912/vscode-extension-schema.golden.json @@ -1036,5 +1036,11 @@ "description": "Enables semanticTokens plugin", "scope": "resource", "type": "boolean" + }, + "haskell.plugin.signatureHelp.globalOn": { + "default": true, + "description": "Enables signatureHelp plugin", + "scope": "resource", + "type": "boolean" } } diff --git a/test/testdata/schema/ghc96/default-config.golden.json b/test/testdata/schema/ghc96/default-config.golden.json index 8467b451f1..efe24df3ae 100644 --- a/test/testdata/schema/ghc96/default-config.golden.json +++ b/test/testdata/schema/ghc96/default-config.golden.json @@ -153,6 +153,9 @@ }, "globalOn": false }, + "signatureHelp": { + "globalOn": true + }, "splice": { "globalOn": true }, diff --git a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json index 1c0b19eb27..50ed005112 100644 --- a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json @@ -1043,6 +1043,12 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.signatureHelp.globalOn": { + "default": true, + "description": "Enables signatureHelp plugin", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.splice.globalOn": { "default": true, "description": "Enables splice plugin", diff --git a/test/testdata/schema/ghc98/default-config.golden.json b/test/testdata/schema/ghc98/default-config.golden.json index 8467b451f1..efe24df3ae 100644 --- a/test/testdata/schema/ghc98/default-config.golden.json +++ b/test/testdata/schema/ghc98/default-config.golden.json @@ -153,6 +153,9 @@ }, "globalOn": false }, + "signatureHelp": { + "globalOn": true + }, "splice": { "globalOn": true }, diff --git a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json index 1c0b19eb27..50ed005112 100644 --- a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json +++ b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json @@ -1043,6 +1043,12 @@ "scope": "resource", "type": "boolean" }, + "haskell.plugin.signatureHelp.globalOn": { + "default": true, + "description": "Enables signatureHelp plugin", + "scope": "resource", + "type": "boolean" + }, "haskell.plugin.splice.globalOn": { "default": true, "description": "Enables splice plugin", From 558f861a5202822c109ce786c64fab39dcac04e9 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sat, 6 Sep 2025 22:29:12 +0800 Subject: [PATCH 77/87] debounce empty diags --- ghcide/src/Development/IDE/Core/Shake.hs | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs index 94bf9f733c..b0870df27b 100644 --- a/ghcide/src/Development/IDE/Core/Shake.hs +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -197,6 +197,7 @@ data Log | LogDelayedAction !(DelayedAction ()) !Seconds | LogBuildSessionFinish !Step !(Either SomeException [Either SomeException ()]) | LogDiagsDiffButNoLspEnv ![FileDiagnostic] + | LogDiagsPublishLog !Key ![FileDiagnostic] ![FileDiagnostic] | LogDefineEarlyCutoffRuleNoDiagHasDiag !FileDiagnostic | LogDefineEarlyCutoffRuleCustomNewnessHasDiag !FileDiagnostic | LogCancelledAction !T.Text @@ -210,6 +211,12 @@ data Log instance Pretty 
Log where pretty = \case + LogDiagsPublishLog key lastDiags diags -> + vcat + [ "Publishing diagnostics for" <+> pretty (show key) + , "Last published:" <+> pretty (showDiagnosticsColored lastDiags) <+> "diagnostics" + , "New:" <+> pretty (showDiagnosticsColored diags) <+> "diagnostics" + ] LogShakeText msg -> pretty msg LogCreateHieDbExportsMapStart -> "Initializing exports map from hiedb" @@ -1401,9 +1408,8 @@ updateFileDiagnostics recorder fp ver k ShakeExtras{diagnostics, hiddenDiagnosti newDiags <- liftIO $ atomicallyNamed "diagnostics - update" $ update (addTagUnsafe "shown ") currentShown diagnostics _ <- liftIO $ atomicallyNamed "diagnostics - hidden" $ update (addTagUnsafe "hidden ") currentHidden hiddenDiagnostics let uri' = filePathToUri' fp - -- let delay = if null newDiags then 0.1 else 0 - -- registerEvent debouncer delay uri' $ withTrace ("report diagnostics " <> fromString (fromNormalizedFilePath fp)) $ \tag -> do - withTrace ("report diagnostics " <> fromString (fromNormalizedFilePath fp)) $ \tag -> do + let delay = if null newDiags then 0.1 else 0 + registerEvent debouncer delay uri' $ withTrace ("report diagnostics " <> fromString (fromNormalizedFilePath fp)) $ \tag -> do join $ mask_ $ do lastPublish <- atomicallyNamed "diagnostics - publish" $ STM.focus (Focus.lookupWithDefault [] <* Focus.insert newDiags) uri' publishedDiagnostics let action = when (lastPublish /= newDiags) $ case lspEnv of @@ -1412,6 +1418,7 @@ updateFileDiagnostics recorder fp ver k ShakeExtras{diagnostics, hiddenDiagnosti Just env -> LSP.runLspT env $ do liftIO $ tag "count" (show $ Prelude.length newDiags) liftIO $ tag "key" (show k) + -- logWith recorder Debug $ LogDiagsPublishLog k lastPublish newDiags LSP.sendNotification SMethod_TextDocumentPublishDiagnostics $ LSP.PublishDiagnosticsParams (fromNormalizedUri uri') (fmap fromIntegral ver) (map fdLspDiagnostic newDiags) return action From 51c1ceba20e22cb73c02f0633e9d4cca7a4b2d48 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sat, 6 Sep 2025 23:15:56 +0800 Subject: [PATCH 78/87] refactor progress reporting: remove unused parameters and simplify function calls --- .../Development/IDE/Core/ProgressReporting.hs | 45 +++++++------------ ghcide/src/Development/IDE/Core/Shake.hs | 3 +- 2 files changed, 16 insertions(+), 32 deletions(-) diff --git a/ghcide/src/Development/IDE/Core/ProgressReporting.hs b/ghcide/src/Development/IDE/Core/ProgressReporting.hs index 20dfbe9e69..3d8a2bf989 100644 --- a/ghcide/src/Development/IDE/Core/ProgressReporting.hs +++ b/ghcide/src/Development/IDE/Core/ProgressReporting.hs @@ -39,11 +39,7 @@ import Language.LSP.Server (ProgressAmount (..), ProgressCancellable (..), withProgress) import qualified Language.LSP.Server as LSP -import qualified ListT as L import qualified StmContainers.Map as STM -import qualified StmContainers.Set as S -import qualified StmContainers.Set as Set -import StmContainers.Set import UnliftIO (Async, async, bracket, cancel) data ProgressEvent @@ -128,25 +124,24 @@ updateState _ StopProgress st = pure st data InProgressState = InProgressState { -- | Number of files to do - todoVar :: TVar Int, + todoVar :: TVar Int, -- | Number of files done - doneVar :: TVar Int, - currentVar :: STM.Map NormalizedFilePath Int, - workingFileVar :: S.Set NormalizedFilePath + doneVar :: TVar Int, + currentVar :: STM.Map NormalizedFilePath Int } newInProgress :: IO InProgressState -newInProgress = InProgressState <$> newTVarIO 0 <*> newTVarIO 0 <*> STM.newIO <*> newIO +newInProgress = InProgressState <$> newTVarIO 0 
<*> newTVarIO 0 <*> STM.newIO recordProgress :: InProgressState -> NormalizedFilePath -> (Int -> Int) -> IO () recordProgress InProgressState {..} file shift = do (prev, new) <- atomicallyNamed "recordProgress" $ STM.focus alterPrevAndNew file currentVar atomicallyNamed "recordProgress2" $ case (prev, new) of (Nothing, 0) -> modifyTVar' doneVar (+ 1) >> modifyTVar' todoVar (+ 1) - (Nothing, _) -> modifyTVar' todoVar (+ 1) >> S.insert file workingFileVar + (Nothing, _) -> modifyTVar' todoVar (+ 1) (Just 0, 0) -> pure () (Just 0, _) -> modifyTVar' doneVar pred - (Just _, 0) -> modifyTVar' doneVar (+ 1) >> S.delete file workingFileVar + (Just _, 0) -> modifyTVar' doneVar (+ 1) (Just _, _) -> pure () where alterPrevAndNew = do @@ -163,18 +158,16 @@ recordProgress InProgressState {..} file shift = do progressReportingNoTrace :: STM Int -> STM Int -> - STM (Maybe T.Text)-> Maybe (LSP.LanguageContextEnv c) -> T.Text -> ProgressReportingStyle -> IO ProgressReporting -progressReportingNoTrace _ _ _ Nothing _title _optProgressStyle = return noProgressReporting -progressReportingNoTrace todo done mf (Just lspEnv) title optProgressStyle = do +progressReportingNoTrace _ _ Nothing _title _optProgressStyle = return noProgressReporting +progressReportingNoTrace todo done (Just lspEnv) title optProgressStyle = do progressState <- newVar NotStarted - let _progressUpdate event = do - liftIO $ updateStateVar $ Event event + let _progressUpdate event = liftIO $ updateStateVar $ Event event _progressStop = updateStateVar StopProgress - updateStateVar = modifyVar_ progressState . updateState (progressCounter lspEnv title optProgressStyle todo done mf) + updateStateVar = modifyVar_ progressState . updateState (progressCounter lspEnv title optProgressStyle todo done) return ProgressReporting {..} -- | `progressReporting` initiates a new progress reporting session. @@ -189,18 +182,12 @@ progressReporting Nothing _title _optProgressStyle = noPerFileProgressReporting progressReporting (Just lspEnv) title optProgressStyle = do inProgressState <- newInProgress progressReportingInner <- progressReportingNoTrace (readTVar $ todoVar inProgressState) - (readTVar $ doneVar inProgressState) (getFile $ workingFileVar inProgressState) (Just lspEnv) title optProgressStyle + (readTVar $ doneVar inProgressState) (Just lspEnv) title optProgressStyle let inProgress :: NormalizedFilePath -> IO a -> IO a inProgress = updateStateForFile inProgressState return PerFileProgressReporting {..} where - getFile :: Set.Set NormalizedFilePath -> STM (Maybe T.Text) - getFile set = do - let lst = S.listT set - x <- L.head lst - return (T.pack . fromNormalizedFilePath <$> x) - updateStateForFile inProgress file = UnliftIO.bracket (liftIO $ f succ) (const $ liftIO $ f pred) . const where -- This functions are deliberately eta-expanded to avoid space leaks. 
@@ -216,25 +203,23 @@ progressCounter :: ProgressReportingStyle -> STM Int -> STM Int -> - STM (Maybe T.Text)-> IO () -progressCounter lspEnv title optProgressStyle getTodo getDone mf = +progressCounter lspEnv title optProgressStyle getTodo getDone = LSP.runLspT lspEnv $ withProgress title Nothing NotCancellable $ \update -> loop update 0 where loop _ _ | optProgressStyle == NoProgress = forever $ liftIO $ threadDelay maxBound loop update prevPct = do - (todo, done, nextPct,file) <- liftIO $ atomically $ do + (todo, done, nextPct) <- liftIO $ atomically $ do todo <- getTodo done <- getDone - file <- mf let nextFrac :: Double nextFrac = if todo == 0 then 0 else fromIntegral done / fromIntegral todo nextPct :: UInt nextPct = floor $ 100 * nextFrac when (nextPct == prevPct) retry - pure (todo, done, nextPct, file) + pure (todo, done, nextPct) - _ <- update (ProgressAmount (Just nextPct) (Just $ (T.pack $ show done) <> "/" <> (T.pack $ show todo) <> maybe mempty (":" <>) file)) + _ <- update (ProgressAmount (Just nextPct) (Just $ T.pack $ show done <> "/" <> show todo)) loop update nextPct mRunLspT :: (Applicative m) => Maybe (LSP.LanguageContextEnv c) -> LSP.LspT c m () -> m () diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs index b0870df27b..4c78655697 100644 --- a/ghcide/src/Development/IDE/Core/Shake.hs +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -713,8 +713,7 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer semanticTokensId <- newTVarIO 0 indexProgressReporting <- progressReportingNoTrace (liftM2 (+) (length <$> readTVar indexPending) (readTVar indexCompleted) ) - (readTVar indexCompleted) (pure $ Nothing) - lspEnv "Indexing" optProgressStyle + (readTVar indexCompleted) lspEnv "Indexing" optProgressStyle let hiedbWriter = HieDbWriter{..} exportsMap <- newTVarIO mempty -- lazily initialize the exports map with the contents of the hiedb From 1ed5fcb474fe9c5b4767c225c1a1c2397397d052 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sun, 7 Sep 2025 04:25:05 +0800 Subject: [PATCH 79/87] fix old actions runs because we did not increment the step before releasing the serialized queue --- ghcide/src/Development/IDE/Core/Shake.hs | 85 +++++++++++++------ .../src/Development/IDE/Graph/Database.hs | 33 +++++-- .../Development/IDE/Graph/Internal/Action.hs | 24 ++++-- .../IDE/Graph/Internal/Database.hs | 2 +- .../Development/IDE/Graph/Internal/Types.hs | 36 ++++++-- hls-graph/src/Development/IDE/WorkerThread.hs | 43 +++++----- hls-test-utils/src/Test/Hls.hs | 5 +- 7 files changed, 156 insertions(+), 72 deletions(-) diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs index 4c78655697..98d446f06d 100644 --- a/ghcide/src/Development/IDE/Core/Shake.hs +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -129,9 +129,9 @@ import Development.IDE.Types.Options as Options import qualified Language.LSP.Protocol.Message as LSP import qualified Language.LSP.Server as LSP -import Control.Exception (Exception (fromException)) -import Data.Either (isLeft, isRight, - lefts) +import Data.Either (isRight, lefts) +import Data.Int (Int64) +import Data.IORef.Extra (atomicModifyIORef'_) import Development.IDE.Core.Tracing import Development.IDE.GHC.Compat (NameCache, NameCacheUpdater, @@ -142,13 +142,15 @@ import Development.IDE.Graph hiding (ShakeValue, action) import qualified Development.IDE.Graph as Shake import Development.IDE.Graph.Database (ShakeDatabase, + shakeGetActionQueueLength, shakeGetBuildStep, 
shakeGetDatabaseKeys, shakeNewDatabase, shakeProfileDatabase, shakeRunDatabaseForKeys, + shakeRunDatabaseForKeysSep, shakeShutDatabase) -import Development.IDE.Graph.Internal.Action (runActionInDb) +import Development.IDE.Graph.Internal.Action (runActionInDbCb) import Development.IDE.Graph.Internal.Database (AsyncParentKill (AsyncParentKill)) import Development.IDE.Graph.Internal.Types (Step (..), getShakeStep) @@ -184,15 +186,15 @@ import qualified StmContainers.Map as STM import System.FilePath hiding (makeRelative) import System.IO.Unsafe (unsafePerformIO) import System.Time.Extra -import UnliftIO (MonadUnliftIO (withRunInIO)) -import qualified UnliftIO.Exception as UE +import UnliftIO (MonadUnliftIO (withRunInIO), + newIORef, readIORef) data Log = LogCreateHieDbExportsMapStart | LogCreateHieDbExportsMapFinish !Int - | LogBuildSessionRestart !String ![DelayedActionInternal] !KeySet !Seconds !(Maybe FilePath) + | LogBuildSessionRestart !String ![DelayedActionInternal] !KeySet !Seconds !(Maybe FilePath) !Int | LogBuildSessionRestartTakingTooLong !Seconds | LogDelayedAction !(DelayedAction ()) !Seconds | LogBuildSessionFinish !Step !(Either SomeException [Either SomeException ()]) @@ -207,10 +209,13 @@ data Log -- * OfInterest Log messages | LogSetFilesOfInterest ![(NormalizedFilePath, FileOfInterestStatus)] | LogShakeText !T.Text + | LogMonitering !T.Text !Int64 deriving Show instance Pretty Log where pretty = \case + LogMonitering name value -> + "Monitoring:" <+> pretty name <+> "value:" <+> pretty value LogDiagsPublishLog key lastDiags diags -> vcat [ "Publishing diagnostics for" <+> pretty (show key) @@ -222,11 +227,12 @@ instance Pretty Log where "Initializing exports map from hiedb" LogCreateHieDbExportsMapFinish exportsMapSize -> "Done initializing exports map from hiedb. Size:" <+> pretty exportsMapSize - LogBuildSessionRestart reason actionQueue keyBackLog abortDuration shakeProfilePath -> + LogBuildSessionRestart reason actionQueue keyBackLog abortDuration shakeProfilePath step -> vcat [ "Restarting build session due to" <+> pretty reason , "Action Queue:" <+> pretty (map actionName actionQueue) , "Keys:" <+> pretty (map show $ toListKeySet keyBackLog) + , "Current step:" <+> pretty (show step) , "Aborting previous build session took" <+> pretty (showDuration abortDuration) <+> pretty shakeProfilePath ] LogBuildSessionRestartTakingTooLong seconds -> "Build restart is taking too long (" <> pretty seconds <> " seconds)" @@ -690,7 +696,7 @@ shakeOpen :: Recorder (WithPriority Log) shakeOpen recorder lspEnv defaultConfig idePlugins debouncer shakeProfileDir (IdeReportProgress reportProgress) ideTesting - withHieDb threadQueue opts monitoring rules rootDir = mdo + withHieDb threadQueue opts argMonitoring rules rootDir = mdo -- see Note [Serializing runs in separate thread] let indexQueue = tIndexQueue threadQueue restartQueue = tRestartQueue threadQueue @@ -717,12 +723,12 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer let hiedbWriter = HieDbWriter{..} exportsMap <- newTVarIO mempty -- lazily initialize the exports map with the contents of the hiedb - -- TODO: exceptions can be swallowed here? 
- _ <- async $ do + async <- async $ do logWith recorder Debug LogCreateHieDbExportsMapStart em <- createExportsMapHieDb withHieDb atomically $ modifyTVar' exportsMap (<> em) logWith recorder Debug $ LogCreateHieDbExportsMapFinish (ExportsMap.size em) + link async progress <- if reportProgress @@ -750,6 +756,9 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer checkParents <- optCheckParents + + logMonitoring <- newLogMonitoring recorder + let monitoring = logMonitoring <> argMonitoring -- monitoring let readValuesCounter = fromIntegral . countRelevantKeys checkParents <$> getStateKeys shakeExtras readDirtyKeys = fromIntegral . countRelevantKeys checkParents . toListKeySet <$> readTVarIO(dirtyKeys shakeExtras) @@ -757,6 +766,7 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer readExportsMap = fromIntegral . ExportsMap.exportsMapSize <$> readTVarIO (exportsMap shakeExtras) readDatabaseCount = fromIntegral . countRelevantKeys checkParents . map fst <$> shakeGetDatabaseKeys shakeDb readDatabaseStep = fromIntegral <$> shakeGetBuildStep shakeDb + readDatabaseActionQueueCount = fromIntegral <$> shakeGetActionQueueLength shakeDb registerGauge monitoring "ghcide.values_count" readValuesCounter registerGauge monitoring "ghcide.dirty_keys_count" readDirtyKeys @@ -764,12 +774,28 @@ shakeOpen recorder lspEnv defaultConfig idePlugins debouncer registerGauge monitoring "ghcide.exports_map_count" readExportsMap registerGauge monitoring "ghcide.database_count" readDatabaseCount registerCounter monitoring "ghcide.num_builds" readDatabaseStep + registerCounter monitoring "ghcide.database_action_queue_count" readDatabaseActionQueueCount stopMonitoring <- start monitoring let ideState = IdeState{..} return ideState - +newLogMonitoring :: MonadIO m => Recorder (WithPriority Log) -> m Monitoring +newLogMonitoring logger = do + actions <- newIORef [] + let registerCounter name readA = do + let update = do + val <- readA + logWith logger Info $ LogMonitering name (fromIntegral val) + atomicModifyIORef'_ actions (update :) + registerGauge = registerCounter + let start = do + a <- regularly 10 $ sequence_ =<< readIORef actions + return (cancel a) + return Monitoring{..} + where + regularly :: Seconds -> IO () -> IO (Async ()) + regularly delay act = async $ forever (act >> sleep delay) getStateKeys :: ShakeExtras -> IO [Key] getStateKeys = (fmap.fmap) fst . atomically . ListT.toList . STM.listT . stateValues @@ -837,7 +863,8 @@ shakeRestart recorder IdeState{..} vfs reason acts ioActionBetweenShakeSession = res <- shakeDatabaseProfile shakeDb backlog <- readTVarIO $ dirtyKeys shakeExtras -- this log is required by tests - logWith recorder Debug $ LogBuildSessionRestart reason queue backlog stopTime res + step <- shakeGetBuildStep shakeDb + logWith recorder Debug $ LogBuildSessionRestart reason queue backlog stopTime res step ) -- It is crucial to be masked here, otherwise we can get killed -- between spawning the new thread and updating shakeSession. 
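The heart of this patch is that the build step is bumped before the serialized queue releases any work, and every queued task carries the step at which it was enqueued, so tasks left over from a superseded session are skipped rather than run against the new step (see the DeliverStatus/deliverStep check in the hls-graph hunks further below). A minimal standalone sketch of that guard, using only base and stm — the names here are illustrative and are not the HLS or hls-graph API:

import Control.Concurrent.STM
import Control.Monad (forever, join, when)

-- A queued task remembers the step that was current when it was enqueued.
data Task = Task Int (IO ())

-- Enqueue work tagged with the current step.
enqueue :: TVar Int -> TQueue Task -> IO () -> STM ()
enqueue stepVar q act = do
  s <- readTVar stepVar
  writeTQueue q (Task s act)

-- Starting a new session bumps the step first, which invalidates
-- everything still sitting in the queue under the old step.
newStep :: TVar Int -> STM Int
newStep stepVar = do
  modifyTVar' stepVar (+ 1)
  readTVar stepVar

-- The serialized worker only runs tasks whose enqueue step is still
-- current; stale tasks are dropped, mirroring the deliverStep check.
worker :: TVar Int -> TQueue Task -> IO ()
worker stepVar q = forever $ join $ atomically $ do
  Task s act <- readTQueue q
  cur <- readTVar stepVar
  pure (when (s == cur) act)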
@@ -859,12 +886,13 @@ shakeEnqueue :: ShakeExtras -> DelayedAction a -> IO (IO a) shakeEnqueue ShakeExtras{actionQueue, shakeRecorder} act = do (b, dai) <- instantiateDelayedAction act atomicallyNamed "actionQueue - push" $ pushQueue dai actionQueue + logWith shakeRecorder Debug $ LogShakeText (T.pack $ "Enqueued action: " <> actionName act) let wait' barrier = waitBarrier barrier `catches` [ Handler(\BlockedIndefinitelyOnMVar -> fail $ "internal bug: forever blocked on MVar for " <> actionName act) - , Handler (\e@AsyncCancelled -> do + , Handler (\e@(SomeAsyncException _) -> do logWith shakeRecorder Debug $ LogCancelledAction (T.pack $ actionName act) atomicallyNamed "actionQueue - abort" $ abortQueue dai actionQueue @@ -892,6 +920,7 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason = do VFSModified vfs -> atomically $ writeTVar vfsVar vfs IdeOptions{optRunSubset} <- getIdeOptionsIO extras + reenqueued <- atomicallyNamed "actionQueue - peek" $ peekInProgress actionQueue step <- getShakeStep shakeDb allPendingKeys <- @@ -907,13 +936,14 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason = do Left e -> logWith recorder Error $ LogShakeText (T.pack $ label ++ " failed: " ++ show e) Right r -> logWith recorder Debug $ LogShakeText (T.pack $ label ++ " finished: " ++ show r) pumpActionThread = do - d <- liftIO $ atomicallyNamed "action queue - pop" $ popQueue actionQueue - r <- runActionInDb [run d] - liftIO $ logResult "pumpActionThread" r + logWith recorder Debug $ LogShakeText (T.pack $ "Starting action" ++ "(step: " <> show step) + d <- runActionInDbCb actionName run (popQueue actionQueue) (logResult "pumpActionThread" . return) + step <- getShakeStep shakeDb + logWith recorder Debug $ LogShakeText (T.pack $ "started action" ++ "(step: " <> show step <> "): " <> actionName d) pumpActionThread -- TODO figure out how to thread the otSpan into defineEarlyCutoff - run d = do + run d = do start <- liftIO offsetTime getAction d liftIO $ atomicallyNamed "actionQueue - done" $ doneQueue d actionQueue @@ -921,23 +951,24 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason = do logWith recorder (actionPriority d) $ LogDelayedAction d runTime -- The inferred type signature doesn't work in ghc >= 9.0.1 - workRun :: (forall b. IO b -> IO b) -> IO () - workRun restore = withSpan "Shake session" $ \otSpan -> do + -- workRun :: (forall b. 
IO b -> IO b) -> IO () + workRun start restore = withSpan "Shake session" $ \otSpan -> do setTag otSpan "reason" (fromString reason) setTag otSpan "queue" (fromString $ unlines $ map actionName reenqueued) whenJust allPendingKeys $ \kk -> setTag otSpan "keys" (BS8.pack $ unlines $ map show $ toListKeySet kk) - let keysActs = pumpActionThread : map run (reenqueued ++ acts) - res <- try @SomeException $ - restore $ shakeRunDatabaseForKeys (toListKeySet <$> allPendingKeys) shakeDb keysActs - logWith recorder Debug $ LogBuildSessionFinish step $ res + res <- try @SomeException $ restore start + logWith recorder Debug $ LogBuildSessionFinish step res + + let keysActs = pumpActionThread : map run (reenqueued ++ acts) + -- first we increase the step, so any actions started from here on + start <- shakeRunDatabaseForKeysSep (toListKeySet <$> allPendingKeys) shakeDb keysActs -- Do the work in a background thread parentTid <- myThreadId workThread <- asyncWithUnmask $ \x -> do childThreadId <- myThreadId logWith recorder Info $ LogShakeText ("Starting shake thread: " <> T.pack (show childThreadId) <> " (parent: " <> T.pack (show parentTid) <> ")") - workRun x - + workRun start x -- Cancelling is required to flush the Shake database when either -- the filesystem or the Ghc configuration have changed @@ -949,7 +980,7 @@ newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason = do cancelWith workThread $ AsyncParentKill tid step shakeShutDatabase shakeDb - + -- should wait until the step has increased pure (ShakeSession{..}) instantiateDelayedAction diff --git a/hls-graph/src/Development/IDE/Graph/Database.hs b/hls-graph/src/Development/IDE/Graph/Database.hs index c7b4e6a6be..0b072974cb 100644 --- a/hls-graph/src/Development/IDE/Graph/Database.hs +++ b/hls-graph/src/Development/IDE/Graph/Database.hs @@ -4,15 +4,19 @@ module Development.IDE.Graph.Database( shakeNewDatabase, shakeRunDatabase, shakeRunDatabaseForKeys, + shakeRunDatabaseForKeysSep, shakeProfileDatabase, shakeGetBuildStep, shakeGetDatabaseKeys, shakeGetDirtySet, shakeGetCleanKeys ,shakeGetBuildEdges, - shakeShutDatabase) where -import Control.Concurrent.STM.Stats (readTVarIO) + shakeShutDatabase, + shakeGetActionQueueLength) where +import Control.Concurrent.STM.Stats (atomically, + readTVarIO) import Control.Exception (SomeException) +import Control.Monad (join) import Data.Dynamic import Data.Maybe import Development.IDE.Graph.Classes () @@ -40,7 +44,7 @@ shakeNewDatabase que opts rules = do pure $ ShakeDatabase (length actions) actions db shakeRunDatabase :: ShakeDatabase -> [Action a] -> IO [Either SomeException a] -shakeRunDatabase = shakeRunDatabaseForKeys Nothing +shakeRunDatabase s xs = shakeRunDatabaseForKeys Nothing s xs -- | Returns the set of dirty keys annotated with their age (in # of builds) shakeGetDirtySet :: ShakeDatabase -> IO [(Key, Int)] @@ -58,15 +62,26 @@ unvoid :: Functor m => m () -> m a unvoid = fmap undefined -- | Assumes that the database is not running a build -shakeRunDatabaseForKeys +-- The nested IO is to +-- seperate incrementing the step from running the build +shakeRunDatabaseForKeysSep :: Maybe [Key] -- ^ Set of keys changed since last run. 
'Nothing' means everything has changed -> ShakeDatabase -> [Action a] - -> IO [Either SomeException a] -shakeRunDatabaseForKeys keysChanged (ShakeDatabase lenAs1 as1 db) as2 = do + -> IO (IO [Either SomeException a]) +shakeRunDatabaseForKeysSep keysChanged (ShakeDatabase lenAs1 as1 db) as2 = do incDatabase db keysChanged - drop lenAs1 <$> runActions db (map unvoid as1 ++ as2) + return $ drop lenAs1 <$> runActions db (map unvoid as1 ++ as2) + +shakeRunDatabaseForKeys + :: Maybe [Key] + -- ^ Set of keys changed since last run. 'Nothing' means everything has changed + -> ShakeDatabase + -> [Action a] + -> (IO [Either SomeException a]) +shakeRunDatabaseForKeys keysChanged sdb as2 = join $ shakeRunDatabaseForKeysSep keysChanged sdb as2 + -- | Given a 'ShakeDatabase', write an HTML profile to the given file about the latest run. @@ -90,3 +105,7 @@ shakeGetBuildEdges (ShakeDatabase _ _ db) = do -- annotated with how long ago (in # builds) they were visited shakeGetDatabaseKeys :: ShakeDatabase -> IO [(Key, Int)] shakeGetDatabaseKeys (ShakeDatabase _ _ db) = getKeysAndVisitAge db + +shakeGetActionQueueLength :: ShakeDatabase -> IO Int +shakeGetActionQueueLength (ShakeDatabase _ _ db) = + atomically $ databaseGetActionQueueLength db diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Action.hs b/hls-graph/src/Development/IDE/Graph/Internal/Action.hs index ce1e7e432d..8624c490e8 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Action.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Action.hs @@ -13,10 +13,11 @@ module Development.IDE.Graph.Internal.Action , runActions , Development.IDE.Graph.Internal.Action.getDirtySet , getKeysAndVisitedAge -, runActionInDb +, runActionInDbCb ) where import Control.Concurrent.Async +import Control.Concurrent.STM.Stats (atomicallyNamed) import Control.DeepSeq (force) import Control.Exception import Control.Monad.IO.Class @@ -31,7 +32,7 @@ import Development.IDE.Graph.Internal.Key import Development.IDE.Graph.Internal.Rules (RuleResult) import Development.IDE.Graph.Internal.Types import System.Exit -import UnliftIO (atomically, +import UnliftIO (STM, atomically, newEmptyTMVarIO, putTMVar, readTMVar) @@ -48,23 +49,32 @@ parallel [] = return [] parallel xs = do a <- Action ask deps <- liftIO $ readIORef $ actionDeps a - case deps of UnknownDeps -> -- if we are already in the rerun mode, nothing we do is going to impact our state - runActionInDb xs + runActionInDb "parallel" xs deps -> error $ "parallel not supported when we have precise dependencies: " ++ show deps -- (newDeps, res) <- liftIO $ unzip <$> runActionInDb usingState xs -- liftIO $ writeIORef (actionDeps a) $ mconcat $ deps : newDeps -- return () -runActionInDb :: [Action a] -> Action [Either SomeException a] -runActionInDb acts = do +-- non-blocking version of runActionInDb +runActionInDbCb :: (a -> String) -> (a -> Action result) -> STM a -> (Either SomeException result -> IO ()) -> Action a +runActionInDbCb getTitle work getAct handler = do + a <- Action ask + liftIO $ atomicallyNamed "action queue - pop" $ do + act <- getAct + runInDataBase (getTitle act) (actionDatabase a) [(ignoreState a $ work act, handler)] + return act + +runActionInDb :: String -> [Action a] -> Action [Either SomeException a] +runActionInDb title acts = do a <- Action ask xs <- mapM (\x -> do barrier <- newEmptyTMVarIO return (x, barrier)) acts - liftIO $ atomically $ runInDataBase (actionDatabase a) (map (\(x, b) -> (ignoreState a x >>= (atomically . putTMVar b . Right), atomically . putTMVar b . 
Left)) xs) + liftIO $ atomically $ runInDataBase title (actionDatabase a) + (map (\(x, b) -> (ignoreState a x, atomically . putTMVar b)) xs) results <- liftIO $ mapM (atomically . readTMVar) $ fmap snd xs return results diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs index 83ded5168d..853be75d5f 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs @@ -123,7 +123,7 @@ builderOne db@Database {..} stack id = do Dirty s -> do SMap.focus (updateStatus $ Running current s) id databaseValues traceEvent ("Starting build of key: " ++ show id ++ ", step "++ show current) - $ runOneInDataBase db (refresh db stack id s) $ \e -> atomically $ SMap.focus (updateStatus $ Exception current e s) id databaseValues + $ runOneInDataBase (show id) db (refresh db stack id s) $ \e -> atomically $ SMap.focus (updateStatus $ Exception current e s) id databaseValues return Nothing Clean r -> return $ Just r -- force here might contains async exceptions from previous runs diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs index 08b911e765..fae69da565 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs @@ -1,6 +1,7 @@ {-# LANGUAGE CPP #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE RecordWildCards #-} module Development.IDE.Graph.Internal.Types where @@ -23,8 +24,9 @@ import Data.Typeable import Debug.Trace (traceEventIO, traceM) import Development.IDE.Graph.Classes import Development.IDE.Graph.Internal.Key -import Development.IDE.WorkerThread (TaskQueue, - awaitRunInThreadStmInNewThreads) +import Development.IDE.WorkerThread (DeliverStatus (..), + TaskQueue, counTaskQueue, + runInThreadStmInNewThreads) import GHC.Conc (TVar, atomically) import GHC.Generics (Generic) import qualified ListT @@ -131,15 +133,31 @@ data Database = Database { databaseValues :: !(Map Key KeyDetails) } -runInDataBase :: Database -> [(IO result, SomeException -> IO ())] -> STM () -runInDataBase db acts = do - s <- getDataBaseStepInt db - awaitRunInThreadStmInNewThreads (getDataBaseStepInt db) s (databaseQueue db) (databaseThreads db) acts -runOneInDataBase :: Database -> IO result -> (SomeException -> IO ()) -> STM () -runOneInDataBase db act handler = do +databaseGetActionQueueLength :: Database -> STM Int +databaseGetActionQueueLength db = do + counTaskQueue (databaseQueue db) + +runInDataBase :: String -> Database -> [(IO result, Either SomeException result -> IO ())] -> STM () +runInDataBase title db acts = do s <- getDataBaseStepInt db - awaitRunInThreadStmInNewThreads (getDataBaseStepInt db) s (databaseQueue db) (databaseThreads db) [(act, handler)] + runInThreadStmInNewThreads (getDataBaseStepInt db) (DeliverStatus s title) (databaseQueue db) (databaseThreads db) acts + +runOneInDataBase :: String -> Database -> IO result -> (SomeException -> IO ()) -> STM () +runOneInDataBase title db act handler = do + s <- getDataBaseStepInt db + runInThreadStmInNewThreads + (getDataBaseStepInt db) + (DeliverStatus s title) + (databaseQueue db) + (databaseThreads db) + [ ( act, + \case + Left e -> handler e + Right _ -> return () + ) + ] + getDataBaseStepInt :: Database -> STM Int getDataBaseStepInt db = do diff --git a/hls-graph/src/Development/IDE/WorkerThread.hs 
b/hls-graph/src/Development/IDE/WorkerThread.hs index 2f496f6cff..344971483f 100644 --- a/hls-graph/src/Development/IDE/WorkerThread.hs +++ b/hls-graph/src/Development/IDE/WorkerThread.hs @@ -11,15 +11,15 @@ see Note [Serializing runs in separate thread] module Development.IDE.WorkerThread ( LogWorkerThread (..), + DeliverStatus(..), withWorkerQueue, awaitRunInThread, TaskQueue, writeTaskQueue, withWorkerQueueSimple, - awaitRunInThreadStm, - awaitRunInThreadStmInNewThread, - awaitRunInThreadStmInNewThreads, - isEmptyTaskQueue + runInThreadStmInNewThreads, + isEmptyTaskQueue, + counTaskQueue ) where import Control.Concurrent.Async (Async, async, withAsync) @@ -33,6 +33,7 @@ import qualified Data.Text as T import Control.Concurrent import Control.Exception (catch) import Control.Monad (void, when) +import Debug.Trace (traceM) import Prettyprinter data LogWorkerThread @@ -117,28 +118,22 @@ withWorkerQueue log title workerAction = ContT $ \mainAction -> do -- | 'awaitRunInThread' queues up an 'IO' action to be run by a worker thread, -- and then blocks until the result is computed. If the action throws an -- non-async exception, it is rethrown in the calling thread. -awaitRunInThreadStm :: TaskQueue (IO ()) -> IO result -> STM result -awaitRunInThreadStm (TaskQueue q) act = do - barrier <- newEmptyTMVar - -- Take an action from TQueue, run it and - -- use barrier to wait for the result - writeTQueue q (try act >>= atomically . putTMVar barrier) - resultOrException <- takeTMVar barrier - case resultOrException of - Left e -> throw (e :: SomeException) - Right r -> return r -awaitRunInThreadStmInNewThread :: STM Int -> Int -> TaskQueue (IO ()) -> TVar [Async ()] -> IO result -> (SomeException -> IO ()) -> STM () -awaitRunInThreadStmInNewThread getStep deliverStep q tthreads act handler = awaitRunInThreadStmInNewThreads getStep deliverStep q tthreads [(act, handler)] +data DeliverStatus = DeliverStatus + { deliverStep :: Int + , deliverName :: String + } deriving (Show) -awaitRunInThreadStmInNewThreads :: STM Int -> Int -> TaskQueue (IO ()) -> TVar [Async ()] -> [(IO result, SomeException -> IO ())] -> STM () -awaitRunInThreadStmInNewThreads getStep deliverStep (TaskQueue q) tthreads acts = do +runInThreadStmInNewThreads :: STM Int -> DeliverStatus -> TaskQueue (IO ()) -> TVar [Async ()] -> [(IO result, Either SomeException result -> IO ())] -> STM () +runInThreadStmInNewThreads getStep deliver (TaskQueue q) tthreads acts = do -- Take an action from TQueue, run it and -- use barrier to wait for the result writeTQueue q (uninterruptibleMask $ \restore -> do curStep <- atomically getStep - when (curStep == deliverStep) $ do - syncs <- mapM (\(act, handler) -> async (restore (void act `catch` \(SomeException e) -> handler (SomeException e)))) acts + traceM ("runInThreadStmInNewThreads: current step: " ++ show curStep ++ " deliver step: " ++ show deliver) + when (curStep == deliverStep deliver) $ do + syncs <- mapM (\(act, handler) -> + async (handler =<< (restore $ Right <$> act) `catch` \e@(SomeException _) -> return (Left e))) acts atomically $ modifyTVar' tthreads (syncs++) ) @@ -162,3 +157,11 @@ tryReadTaskQueue (TaskQueue q) = tryReadTQueue q isEmptyTaskQueue :: TaskQueue a -> STM Bool isEmptyTaskQueue (TaskQueue q) = isEmptyTQueue q +-- look and count the number of items in the queue +-- do not remove them +counTaskQueue :: TaskQueue a -> STM Int +counTaskQueue (TaskQueue q) = do + xs <- flushTQueue q + mapM_ (unGetTQueue q) (reverse xs) + return $ length xs + diff --git 
a/hls-test-utils/src/Test/Hls.hs b/hls-test-utils/src/Test/Hls.hs index 0ab203fe36..3ac4413860 100644 --- a/hls-test-utils/src/Test/Hls.hs +++ b/hls-test-utils/src/Test/Hls.hs @@ -770,7 +770,10 @@ runSessionWithTestConfig TestConfig{..} session = let plugins = testPluginDescriptor recorder <> lspRecorderPlugin timeoutOverride <- fmap read <$> lookupEnv "LSP_TIMEOUT" - let sconf' = testConfigSession { lspConfig = hlsConfigToClientConfig testLspConfig, messageTimeout = fromMaybe (messageTimeout defaultConfig) timeoutOverride} + let sconf' = testConfigSession { lspConfig = hlsConfigToClientConfig testLspConfig + , messageTimeout = fromMaybe (messageTimeout defaultConfig) timeoutOverride + , logStdErr = True + } arguments = testingArgs serverRoot recorderIde plugins server <- async $ IDEMain.defaultMain (cmapWithPrio LogIDEMain recorderIde) From df6a8f4b35d628fed1af48627dc249260f8fd6e0 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sun, 7 Sep 2025 05:05:11 +0800 Subject: [PATCH 80/87] fix build --- hls-graph/hls-graph.cabal | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/hls-graph/hls-graph.cabal b/hls-graph/hls-graph.cabal index d1bbb61c31..b1553580d3 100644 --- a/hls-graph/hls-graph.cabal +++ b/hls-graph/hls-graph.cabal @@ -133,7 +133,7 @@ test-suite tests -threaded -rtsopts -with-rtsopts=-N -fno-ignore-asserts build-depends: - , transformers ^>=0.6.1.2 + , transformers , base , extra , hls-graph @@ -143,6 +143,5 @@ test-suite tests , tasty , tasty-hspec >= 1.2 , tasty-rerun - , transformers build-tool-depends: hspec-discover:hspec-discover From 80733e7131a0c905157c5ffa4673d22e6067cf19 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sun, 7 Sep 2025 05:24:18 +0800 Subject: [PATCH 81/87] remove unused imports from various modules --- hls-graph/src/Development/IDE/WorkerThread.hs | 5 ++--- plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs | 1 - plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs | 6 ++---- .../src/Ide/Plugin/ExplicitFields.hs | 3 +-- plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs | 1 - .../src/Ide/Plugin/StylishHaskell.hs | 1 - 6 files changed, 5 insertions(+), 12 deletions(-) diff --git a/hls-graph/src/Development/IDE/WorkerThread.hs b/hls-graph/src/Development/IDE/WorkerThread.hs index 344971483f..1b527e089d 100644 --- a/hls-graph/src/Development/IDE/WorkerThread.hs +++ b/hls-graph/src/Development/IDE/WorkerThread.hs @@ -32,8 +32,7 @@ import qualified Data.Text as T import Control.Concurrent import Control.Exception (catch) -import Control.Monad (void, when) -import Debug.Trace (traceM) +import Control.Monad (when) import Prettyprinter data LogWorkerThread @@ -130,7 +129,7 @@ runInThreadStmInNewThreads getStep deliver (TaskQueue q) tthreads acts = do -- use barrier to wait for the result writeTQueue q (uninterruptibleMask $ \restore -> do curStep <- atomically getStep - traceM ("runInThreadStmInNewThreads: current step: " ++ show curStep ++ " deliver step: " ++ show deliver) + -- traceM ("runInThreadStmInNewThreads: current step: " ++ show curStep ++ " deliver step: " ++ show deliver) when (curStep == deliverStep deliver) $ do syncs <- mapM (\(act, handler) -> async (handler =<< (restore $ Right <$> act) `catch` \e@(SomeException _) -> return (Left e))) acts diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs index bb0994442a..0bd40b13cc 100644 --- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs +++ 
b/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs @@ -6,7 +6,6 @@ module Ide.Plugin.Class.ExactPrint where import Control.Monad.Trans.Maybe import Data.Either.Extra (eitherToMaybe) -import Data.Functor.Identity (Identity) import qualified Data.Text as T import Development.IDE.GHC.Compat import GHC.Parser.Annotation diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs index 1669aba43d..0fa6b4890a 100644 --- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs @@ -22,7 +22,6 @@ import Development.IDE import Development.IDE.Core.PluginUtils (useMT) import qualified Development.IDE.Core.Shake as Shake import Development.IDE.GHC.Compat hiding (newUnique, (<+>)) -import Development.IDE.GHC.Compat.Util (bagToList) import Development.IDE.Graph.Classes import GHC.Generics import Ide.Plugin.Class.Utils @@ -226,9 +225,8 @@ getInstanceBindTypeSigsRule recorder = do whenMaybe (isBindingName name) $ do env <- tcInitTidyEnv #if MIN_VERSION_ghc(9,11,0) - let ty = + let ty = tidyOpenType env (idType id) #else - let (_, ty) = + let (_, ty) = tidyOpenType env (idType id) #endif - tidyOpenType env (idType id) pure $ InstanceBindTypeSig name ty diff --git a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs index a111e9062b..e2f8eb38f6 100644 --- a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs +++ b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs @@ -57,8 +57,7 @@ import Development.IDE.GHC.Compat (FieldLabel (flSelector), HsConDetails (RecCon), HsExpr (HsApp, HsVar, XExpr), HsFieldBind (hfbLHS), - HsRecFields (..), - HsWrap (HsWrap), LPat, + HsRecFields (..), LPat, Located, NamedThing (getName), Outputable, diff --git a/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs b/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs index db1696d94b..5ca86baecc 100644 --- a/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs +++ b/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs @@ -5,7 +5,6 @@ import Control.Monad.Except (ExceptT, MonadError, throwError) import Control.Monad.IO.Class (liftIO) import qualified Data.Array as A -import Data.Foldable (foldl') import Data.HashMap.Strict (HashMap) import qualified Data.HashMap.Strict as HM import qualified Data.HashSet as HS diff --git a/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs b/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs index 767cc061df..0f78e67d7e 100644 --- a/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs +++ b/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs @@ -25,7 +25,6 @@ import Ide.PluginUtils import Ide.Types hiding (Config) import Language.Haskell.Stylish import Language.LSP.Protocol.Types as LSP -import System.Directory import System.FilePath data Log From 26045a12ce5b08ba36f98fb25f852a67e83283ec Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sun, 7 Sep 2025 07:22:52 +0800 Subject: [PATCH 82/87] upgrade lsp version --- cabal.project | 6 ++++++ ghcide/src/Development/IDE/Core/FileStore.hs | 4 ++-- ghcide/src/Development/IDE/Core/Rules.hs | 4 ++-- ghcide/src/Development/IDE/Core/Shake.hs | 7 +++++-- ghcide/src/Development/IDE/Plugin/Completions/Logic.hs | 2 +- .../hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs | 2 +- .../hls-semantic-tokens-plugin/test/SemanticTokensTest.hs | 
2 +- 7 files changed, 18 insertions(+), 9 deletions(-) diff --git a/cabal.project b/cabal.project index 8d8bd080af..d83e432492 100644 --- a/cabal.project +++ b/cabal.project @@ -56,3 +56,9 @@ if impl(ghc >= 9.11) allow-newer: cabal-install-parsers:base, cabal-install-parsers:time, + +source-repository-package + type: git + location: https://github.com/soulomoon/lsp.git + tag: 640c7c755bf16128e3cb19c257688aa3305ff9f5 + subdir: lsp lsp-types lsp-test diff --git a/ghcide/src/Development/IDE/Core/FileStore.hs b/ghcide/src/Development/IDE/Core/FileStore.hs index 07b104d26d..0bdec3874e 100644 --- a/ghcide/src/Development/IDE/Core/FileStore.hs +++ b/ghcide/src/Development/IDE/Core/FileStore.hs @@ -252,8 +252,8 @@ getVersionedTextDoc doc = do maybe (pure Nothing) getVirtualFile $ uriToNormalizedFilePath $ toNormalizedUri uri let ver = case mvf of - Just (VirtualFile lspver _ _) -> lspver - Nothing -> 0 + Just (VirtualFile lspver _ _ _) -> lspver + Nothing -> 0 return (VersionedTextDocumentIdentifier uri ver) fileStoreRules :: Recorder (WithPriority Log) -> (NormalizedFilePath -> Action Bool) -> Rules () diff --git a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs index 8273570aca..3e1d7f09ea 100644 --- a/ghcide/src/Development/IDE/Core/Rules.hs +++ b/ghcide/src/Development/IDE/Core/Rules.hs @@ -516,8 +516,8 @@ persistentHieFileRule recorder = addPersistentRule GetHieAst $ \file -> runMaybe vfsRef <- asks vfsVar vfsData <- liftIO $ _vfsMap <$> readTVarIO vfsRef (currentSource, ver) <- liftIO $ case M.lookup (filePathToUri' file) vfsData of - Nothing -> (,Nothing) . T.decodeUtf8 <$> BS.readFile (fromNormalizedFilePath file) - Just vf -> pure (virtualFileText vf, Just $ virtualFileVersion vf) + Just (Open vf) -> pure (virtualFileText vf, Just $ virtualFileVersion vf) + _ -> (,Nothing) . T.decodeUtf8 <$> BS.readFile (fromNormalizedFilePath file) let refmap = generateReferencesMap . getAsts . Compat.hie_asts $ res del = deltaFromDiff (T.decodeUtf8 $ Compat.hie_hs_src res) currentSource pure (HAR (Compat.hie_module res) (Compat.hie_asts res) refmap mempty (HieFromDisk res),del,ver) diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs index 98d446f06d..51c34e61e2 100644 --- a/ghcide/src/Development/IDE/Core/Shake.hs +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -147,7 +147,6 @@ import Development.IDE.Graph.Database (ShakeDatabase, shakeGetDatabaseKeys, shakeNewDatabase, shakeProfileDatabase, - shakeRunDatabaseForKeys, shakeRunDatabaseForKeysSep, shakeShutDatabase) import Development.IDE.Graph.Internal.Action (runActionInDbCb) @@ -427,10 +426,14 @@ class Typeable a => IsIdeGlobal a where -- data VirtualFileEntry = Open VirtualFile | Closed ClosedVirtualFile -- | Read a virtual file from the current snapshot +getOpenFile :: VirtualFileEntry -> Maybe VirtualFile +getOpenFile (Open vf) = Just vf +getOpenFile _ = Nothing +-- | Read a virtual file from the current snapshot getVirtualFile :: NormalizedFilePath -> Action (Maybe VirtualFile) getVirtualFile nf = do vfs <- fmap _vfsMap . liftIO . readTVarIO . vfsVar =<< getShakeExtras - let file = Map.lookup (filePathToUri' nf) vfs + let file = getOpenFile =<< Map.lookup (filePathToUri' nf) vfs pure $! 
 file
         -- Don't leak a reference to the entire map
         -- Take a snapshot of the current LSP VFS
diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs
index 0a5cecaca8..6c59a5ffe5 100644
--- a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs
+++ b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs
@@ -857,7 +857,7 @@ mergeListsBy cmp all_lists = merge_lists all_lists

 -- |From the given cursor position, gets the prefix module or record for autocompletion
 getCompletionPrefix :: Position -> VFS.VirtualFile -> PosPrefixInfo
-getCompletionPrefix pos (VFS.VirtualFile _ _ ropetext) = getCompletionPrefixFromRope pos ropetext
+getCompletionPrefix pos (VFS.VirtualFile _ _ ropetext _) = getCompletionPrefixFromRope pos ropetext

 getCompletionPrefixFromRope :: Position -> Rope.Rope -> PosPrefixInfo
 getCompletionPrefixFromRope pos@(Position l c) ropetext =
diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs
index 638d14c51d..6f2bd70ab4 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs
@@ -7,7 +7,7 @@ import Development.IDE.GHC.Compat hiding (LocatedA,
 import Development.IDE.GHC.Compat.ExactPrint (ExactPrint, exactPrint)
 import Development.IDE.GHC.Compat.Util
 import Generics.SYB (ext1Q, ext2Q, extQ)
-import GHC.Hs hiding (AnnLet)
+import GHC.Hs
 import GHC.Hs.Dump
 import GHC.Plugins hiding (AnnLet)
 import Prelude hiding ((<>))
diff --git a/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs b/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs
index a0d1648fb3..7daae0df51 100644
--- a/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs
+++ b/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs
@@ -90,7 +90,7 @@ docLspSemanticTokensString :: (HasCallStack) => TextDocumentIdentifier -> Sessio
 docLspSemanticTokensString doc = do
   res <- Test.getSemanticTokens doc
   textContent <- documentContents doc
-  let vfs = VirtualFile 0 0 (Rope.fromText textContent)
+  let vfs = VirtualFile 0 0 (Rope.fromText textContent) (Just LanguageKind_Haskell)
   case res ^? Language.LSP.Protocol.Types._L of
     Just tokens -> do
       either (error . show) pure $ recoverLspSemanticTokens vfs tokens

From 74e190921f476efc4810e5479fec46262fb95e97 Mon Sep 17 00:00:00 2001
From: soulomoon
Date: Sun, 7 Sep 2025 09:39:45 +0800
Subject: [PATCH 83/87] fix build in older versions

---
 ghcide/src/Development/IDE/Core/Compile.hs                  | 6 +++++-
 ghcide/src/Development/IDE/Core/PositionMapping.hs          | 1 +
 ghcide/src/Development/IDE/Core/Shake.hs                    | 3 ++-
 ghcide/src/Development/IDE/GHC/Compat.hs                    | 1 +
 ghcide/src/Development/IDE/LSP/LanguageServer.hs            | 2 +-
 ghcide/src/Development/IDE/Types/HscEnvEq.hs                | 3 ---
 plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs | 1 +
 plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs      | 1 +
 .../src/Ide/Plugin/ExplicitFields.hs                        | 3 ++-
 plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs            | 1 +
 plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs | 2 +-
 plugins/hls-signature-help-plugin/test/Main.hs              | 3 ++-
 .../src/Ide/Plugin/StylishHaskell.hs                        | 1 +
 13 files changed, 19 insertions(+), 9 deletions(-)

diff --git a/ghcide/src/Development/IDE/Core/Compile.hs b/ghcide/src/Development/IDE/Core/Compile.hs
index 8065e56325..2b25fb08c0 100644
--- a/ghcide/src/Development/IDE/Core/Compile.hs
+++ b/ghcide/src/Development/IDE/Core/Compile.hs
@@ -115,9 +115,14 @@ import GHC.Tc.Gen.Splice
 import GHC.Types.Error
 import GHC.Types.ForeignStubs
 import GHC.Types.TypeEnv
+import Development.IDE.WorkerThread (writeTaskQueue)

 -- See Note [Guidelines For Using CPP In GHCIDE Import Statements]

+#if !MIN_VERSION_ghc(9,11,0)
+import GHC.Types.HpcInfo
+#endif
+
 #if MIN_VERSION_ghc(9,7,0)
 import Data.Foldable (toList)
 import GHC.Unit.Module.Warnings
@@ -150,7 +155,6 @@ import GHC.Iface.Ext.Types (NameEntityInfo)

 #if MIN_VERSION_ghc(9,12,0)
 import Development.IDE.Import.FindImports
-import Development.IDE.WorkerThread (writeTaskQueue)
 #endif

 --Simple constants to make sure the source is consistently named
diff --git a/ghcide/src/Development/IDE/Core/PositionMapping.hs b/ghcide/src/Development/IDE/Core/PositionMapping.hs
index 41f9ca50e0..de02f5b1f7 100644
--- a/ghcide/src/Development/IDE/Core/PositionMapping.hs
+++ b/ghcide/src/Development/IDE/Core/PositionMapping.hs
@@ -28,6 +28,7 @@ import Control.Lens ((^.))
 import Control.Monad
 import Data.Algorithm.Diff
 import Data.Bifunctor
+import Data.List
 import qualified Data.Text as T
 import qualified Data.Vector.Unboxed as V
 import qualified Language.LSP.Protocol.Lens as L
diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs
index 51c34e61e2..4552fc6457 100644
--- a/ghcide/src/Development/IDE/Core/Shake.hs
+++ b/ghcide/src/Development/IDE/Core/Shake.hs
@@ -107,7 +107,8 @@ import Data.Hashable
 import qualified Data.HashMap.Strict as HMap
 import Data.HashSet (HashSet)
 import qualified Data.HashSet as HSet
-import Data.List.Extra (partition, takeEnd)
+import Data.List.Extra (foldl', partition,
+                        takeEnd)
 import qualified Data.Map.Strict as Map
 import Data.Maybe
 import qualified Data.SortedList as SL
diff --git a/ghcide/src/Development/IDE/GHC/Compat.hs b/ghcide/src/Development/IDE/GHC/Compat.hs
index d4750f1a2e..befd22c8de 100644
--- a/ghcide/src/Development/IDE/GHC/Compat.hs
+++ b/ghcide/src/Development/IDE/GHC/Compat.hs
@@ -115,6 +115,7 @@ module Development.IDE.GHC.Compat(
 import Control.Applicative ((<|>))
 import qualified Data.ByteString as BS
 import Data.Coerce (coerce)
+import Data.List (foldl')
 import qualified Data.Map as Map
 import qualified Data.Set as S
 import Data.String (IsString (fromString))
diff --git a/ghcide/src/Development/IDE/LSP/LanguageServer.hs b/ghcide/src/Development/IDE/LSP/LanguageServer.hs
index bff90cb66f..9a56f02137 100644
--- a/ghcide/src/Development/IDE/LSP/LanguageServer.hs
+++ b/ghcide/src/Development/IDE/LSP/LanguageServer.hs
@@ -54,7 +54,7 @@ import Ide.Logger
 import Language.LSP.Server (LanguageContextEnv, LspServerLog,
                             type (<~>))
-import System.Time.Extra (Seconds, sleep)
+import System.Time.Extra (Seconds)
 import System.Timeout (timeout)

 data Log = LogRegisteringIdeConfig !IdeConfiguration
diff --git a/ghcide/src/Development/IDE/Types/HscEnvEq.hs b/ghcide/src/Development/IDE/Types/HscEnvEq.hs
index 26eb8d5395..e14ab56847 100644
--- a/ghcide/src/Development/IDE/Types/HscEnvEq.hs
+++ b/ghcide/src/Development/IDE/Types/HscEnvEq.hs
@@ -15,7 +15,6 @@ import Control.Exception (evaluate, mask, throwIO)
 import Control.Monad.Extra (eitherM, join, mapMaybeM)
 import Data.Either (fromRight)
 import Data.IORef
-import qualified Data.Map as M
 import Data.Unique (Unique)
 import qualified Data.Unique as Unique
 import Development.IDE.GHC.Compat hiding (newUnique)
@@ -25,9 +24,7 @@ import Development.IDE.GHC.Util (lookupPackageConfig)
 import Development.IDE.Graph.Classes
 import Development.IDE.Types.Exports (ExportsMap, createExportsMap)
 import GHC.Driver.Env (hsc_all_home_unit_ids)
-import Ide.PluginUtils (toAbsolute)
 import OpenTelemetry.Eventlog (withSpan)
-import System.Directory (makeAbsolute)

 -- | An 'HscEnv' with equality. Two values are considered equal
diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs
index 0bd40b13cc..bb0994442a 100644
--- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs
+++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs
@@ -6,6 +6,7 @@ module Ide.Plugin.Class.ExactPrint where

 import Control.Monad.Trans.Maybe
 import Data.Either.Extra (eitherToMaybe)
+import Data.Functor.Identity (Identity)
 import qualified Data.Text as T
 import Development.IDE.GHC.Compat
 import GHC.Parser.Annotation
diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs
index 0fa6b4890a..ee2a3fda7f 100644
--- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs
+++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs
@@ -22,6 +22,7 @@ import Development.IDE
 import Development.IDE.Core.PluginUtils (useMT)
 import qualified Development.IDE.Core.Shake as Shake
 import Development.IDE.GHC.Compat hiding (newUnique, (<+>))
+import Development.IDE.GHC.Compat.Util (bagToList)
 import Development.IDE.Graph.Classes
 import GHC.Generics
 import Ide.Plugin.Class.Utils
diff --git a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs
index e2f8eb38f6..a111e9062b 100644
--- a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs
+++ b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs
@@ -57,7 +57,8 @@ import Development.IDE.GHC.Compat (FieldLabel (flSelector),
                                    HsConDetails (RecCon),
                                    HsExpr (HsApp, HsVar, XExpr),
                                    HsFieldBind (hfbLHS),
-                                   HsRecFields (..), LPat,
+                                   HsRecFields (..),
+                                   HsWrap (HsWrap), LPat,
                                    Located,
                                    NamedThing (getName),
                                    Outputable,
diff --git a/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs b/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs
index 5ca86baecc..db1696d94b 100644
--- a/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs
+++ b/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs
@@ -5,6 +5,7 @@
 import Control.Monad.Except (ExceptT, MonadError, throwError)
 import Control.Monad.IO.Class (liftIO)
 import qualified Data.Array as A
+import Data.Foldable (foldl')
 import Data.HashMap.Strict (HashMap)
 import qualified Data.HashMap.Strict as HM
 import qualified Data.HashSet as HS
diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs
index 6f2bd70ab4..638d14c51d 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs
@@ -7,7 +7,7 @@ import Development.IDE.GHC.Compat hiding (LocatedA,
 import Development.IDE.GHC.Compat.ExactPrint (ExactPrint, exactPrint)
 import Development.IDE.GHC.Compat.Util
 import Generics.SYB (ext1Q, ext2Q, extQ)
-import GHC.Hs
+import GHC.Hs hiding (AnnLet)
 import GHC.Hs.Dump
 import GHC.Plugins hiding (AnnLet)
 import Prelude hiding ((<>))
diff --git a/plugins/hls-signature-help-plugin/test/Main.hs b/plugins/hls-signature-help-plugin/test/Main.hs
index 4ac665e7d1..f6518552ae 100644
--- a/plugins/hls-signature-help-plugin/test/Main.hs
+++ b/plugins/hls-signature-help-plugin/test/Main.hs
@@ -11,7 +11,8 @@ import qualified Data.Text as T
 import Development.IDE.Plugin.Completions.Types (PosPrefixInfo (PosPrefixInfo))
 import Ide.Plugin.SignatureHelp (descriptor)
 import qualified Language.LSP.Protocol.Lens as L
-import Test.Hls
+import Test.Hls hiding
+                (getSignatureHelp)
 import Test.Hls.FileSystem (VirtualFileTree,
                             directCradle, file,
                             mkVirtualFileTree,
diff --git a/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs b/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs
index 0f78e67d7e..767cc061df 100644
--- a/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs
+++ b/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs
@@ -25,6 +25,7 @@ import Ide.PluginUtils
 import Ide.Types hiding (Config)
 import Language.Haskell.Stylish
 import Language.LSP.Protocol.Types as LSP
+import System.Directory
 import System.FilePath

 data Log

From c4438374e1423eff5be939d51d2c502b678daceb Mon Sep 17 00:00:00 2001
From: soulomoon
Date: Sun, 7 Sep 2025 10:20:43 +0800
Subject: [PATCH 84/87] change to list

---
 .../src/Development/IDE/Core/PluginUtils.hs  |  8 +++--
 ghcide/src/Development/IDE/Core/Shake.hs     | 35 ++++++++++++------
 .../Development/IDE/Graph/Internal/Action.hs |  9 +++--
 .../IDE/Graph/Internal/Database.hs           |  6 ++--
 .../hls-signature-help-plugin/test/Main.hs   |  1 -
 5 files changed, 37 insertions(+), 22 deletions(-)

diff --git a/ghcide/src/Development/IDE/Core/PluginUtils.hs b/ghcide/src/Development/IDE/Core/PluginUtils.hs
index 6ba633df26..ca404b0dd0 100644
--- a/ghcide/src/Development/IDE/Core/PluginUtils.hs
+++ b/ghcide/src/Development/IDE/Core/PluginUtils.hs
@@ -88,11 +88,11 @@ useMT :: IdeRule k v => k -> NormalizedFilePath -> MaybeT Action v
 useMT k = MaybeT . Shake.use k

 -- |ExceptT version of `uses` that throws a PluginRuleFailed upon failure
-usesE :: (Traversable f, IdeRule k v) => k -> f NormalizedFilePath -> ExceptT PluginError Action (f v)
+usesE :: (IdeRule k v) => k -> [NormalizedFilePath] -> ExceptT PluginError Action [v]
 usesE k = maybeToExceptT (PluginRuleFailed (T.pack $ show k)) . usesMT k

 -- |MaybeT version of `uses`
-usesMT :: (Traversable f, IdeRule k v) => k -> f NormalizedFilePath -> MaybeT Action (f v)
+usesMT :: (IdeRule k v) => k -> [NormalizedFilePath] -> MaybeT Action [v]
 usesMT k xs = MaybeT $ sequence <$> Shake.uses k xs

 -- |ExceptT version of `useWithStale` that throws a PluginRuleFailed upon
@@ -104,7 +104,9 @@ useWithStaleE key = maybeToExceptT (PluginRuleFailed (T.pack $ show key)) . useW

 -- |MaybeT version of `useWithStale`
 useWithStaleMT :: IdeRule k v => k -> NormalizedFilePath -> MaybeT Action (v, PositionMapping)
-useWithStaleMT key file = MaybeT $ runIdentity <$> Shake.usesWithStale key (Identity file)
+useWithStaleMT key file = MaybeT $ do
+    [r] <- Shake.usesWithStale key [file]
+    return r

 -- ----------------------------------------------------------------------------
 -- IdeAction wrappers
diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs
index 4552fc6457..b854f7ea51 100644
--- a/ghcide/src/Development/IDE/Core/Shake.hs
+++ b/ghcide/src/Development/IDE/Core/Shake.hs
@@ -165,6 +165,7 @@ import Development.IDE.Types.Monitoring (Monitoring (..))
 import Development.IDE.Types.Shake
 import Development.IDE.WorkerThread
 import qualified Focus
+import GHC.Base (undefined)
 import GHC.Fingerprint
 import GHC.Stack (HasCallStack)
 import GHC.TypeLits (KnownSymbol)
@@ -1095,12 +1096,16 @@ defineNoDiagnostics recorder op = defineEarlyCutoff recorder $ RuleNoDiagnostics

 -- | Request a Rule result if available
 use :: IdeRule k v => k -> NormalizedFilePath -> Action (Maybe v)
-use key file = runIdentity <$> uses key (Identity file)
+use key file = do
+    [r] <- uses key [file]
+    return r

 -- | Request a Rule result, it not available return the last computed result, if any, which may be stale
 useWithStale :: IdeRule k v => k -> NormalizedFilePath -> Action (Maybe (v, PositionMapping))
-useWithStale key file = runIdentity <$> usesWithStale key (Identity file)
+useWithStale key file = do
+    [r] <- usesWithStale key [file]
+    return r

 -- |Request a Rule result, it not available return the last computed result
 -- which may be stale.
@@ -1111,7 +1116,9 @@ useWithStale key file = runIdentity <$> usesWithStale key (Identity file)
 -- WARNING: Not suitable for PluginHandlers. Use `useWithStaleE` instead.
 useWithStale_ :: IdeRule k v => k -> NormalizedFilePath -> Action (v, PositionMapping)
-useWithStale_ key file = runIdentity <$> usesWithStale_ key (Identity file)
+useWithStale_ key file = do
+    [r] <- usesWithStale_ key [file]
+    return r

 -- |Plural version of 'useWithStale_'
 --
 -- none available.
 --
 -- WARNING: Not suitable for PluginHandlers.
-usesWithStale_ :: (Traversable f, IdeRule k v) => k -> f NormalizedFilePath -> Action (f (v, PositionMapping))
+usesWithStale_ :: (IdeRule k v) => k -> [NormalizedFilePath] -> Action [(v, PositionMapping)]
 usesWithStale_ key files = do
     res <- usesWithStale key files
     case sequence res of
@@ -1191,7 +1198,9 @@ useNoFile key = use key emptyFilePath
 --
 -- WARNING: Not suitable for PluginHandlers. Use `useE` instead.
 use_ :: IdeRule k v => k -> NormalizedFilePath -> Action v
-use_ key file = runIdentity <$> uses_ key (Identity file)
+use_ key file = do
+    [r] <- uses_ key [file]
+    return r

 useNoFile_ :: IdeRule k v => k -> Action v
 useNoFile_ key = use_ key emptyFilePath
@@ -1202,7 +1211,7 @@ useNoFile_ key = use_ key emptyFilePath
 -- none available.
 --
 -- WARNING: Not suitable for PluginHandlers. Use `usesE` instead.
-uses_ :: (Traversable f, IdeRule k v) => k -> f NormalizedFilePath -> Action (f v)
+uses_ :: (IdeRule k v) => k -> [NormalizedFilePath] -> Action [v]
 uses_ key files = do
     res <- uses key files
     case sequence res of
@@ -1210,13 +1219,13 @@ uses_ key files = do
         Just v -> return v

 -- | Plural version of 'use'
-uses :: (Traversable f, IdeRule k v)
-    => k -> f NormalizedFilePath -> Action (f (Maybe v))
+uses :: (IdeRule k v)
+    => k -> [NormalizedFilePath] -> Action [(Maybe v)]
 uses key files = fmap (\(A value) -> currentValue value) <$> apply (fmap (Q . (key,)) files)

 -- | Return the last computed result which might be stale.
-usesWithStale :: (Traversable f, IdeRule k v)
-    => k -> f NormalizedFilePath -> Action (f (Maybe (v, PositionMapping)))
+usesWithStale :: (IdeRule k v)
+    => k -> [NormalizedFilePath] -> Action [(Maybe (v, PositionMapping))]
 usesWithStale key files = do
     _ <- apply (fmap (Q . (key,)) files)
     -- We don't look at the result of the 'apply' since 'lastValue' will
@@ -1243,8 +1252,10 @@ useWithSeparateFingerprintRule_ fingerKey key file = do

 useWithoutDependency :: IdeRule k v => k -> NormalizedFilePath -> Action (Maybe v)
-useWithoutDependency key file =
-    (\(Identity (A value)) -> currentValue value) <$> applyWithoutDependency (Identity (Q (key, file)))
+useWithoutDependency key file = do
+    [A x] <- applyWithoutDependency [Q (key, file)]
+    return $ currentValue x
+

 data RuleBody k v
   = Rule (k -> NormalizedFilePath -> Action (Maybe BS.ByteString, IdeResult v))
diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Action.hs b/hls-graph/src/Development/IDE/Graph/Internal/Action.hs
index 8624c490e8..abab30c930 100644
--- a/hls-graph/src/Development/IDE/Graph/Internal/Action.hs
+++ b/hls-graph/src/Development/IDE/Graph/Internal/Action.hs
@@ -114,9 +114,12 @@ actionFinally a b = do
     Action $ lift $ finally (runReaderT (fromAction a) v) b

 apply1 :: (RuleResult key ~ value, ShakeValue key, Typeable value) => key -> Action value
-apply1 k = runIdentity <$> apply (Identity k)
+apply1 k = do
+    [r] <- apply [k]
+    return r

-apply :: (Traversable f, RuleResult key ~ value, ShakeValue key, Typeable value) => f key -> Action (f value)
+
+apply :: (RuleResult key ~ value, ShakeValue key, Typeable value) => [key] -> Action [value]
 apply ks = do
     db <- Action $ asks actionDatabase
     stack <- Action $ asks actionStack
@@ -127,7 +130,7 @@ apply ks = do
     pure vs

 -- | Evaluate a list of keys without recording any dependencies.
-applyWithoutDependency :: (Traversable f, RuleResult key ~ value, ShakeValue key, Typeable value) => f key -> Action (f value)
+applyWithoutDependency :: (RuleResult key ~ value, ShakeValue key, Typeable value) => [key] -> Action [value]
 applyWithoutDependency ks = do
     db <- Action $ asks actionDatabase
     stack <- Action $ asks actionStack
diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs
index 853be75d5f..bae81ece30 100644
--- a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs
+++ b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs
@@ -83,8 +83,8 @@ updateDirty = Focus.adjust $ \(KeyDetails status rdeps) ->
         in KeyDetails status' rdeps

 -- | Unwrap and build a list of keys in parallel
 build
-  :: forall f key value . (Traversable f, RuleResult key ~ value, Typeable key, Show key, Hashable key, Eq key, Typeable value)
-  => Database -> Stack -> f key -> IO (f Key, f value)
+  :: forall key value . (RuleResult key ~ value, Typeable key, Show key, Hashable key, Eq key, Typeable value)
+  => Database -> Stack -> [key] -> IO ([Key], [value])
 -- build _ st k | traceShow ("build", st, k) False = undefined
 build db stack keys = do
   step <- readTVarIO $ databaseStep db
@@ -107,7 +107,7 @@ build db stack keys = do
 -- | Build a list of keys and return their results.
 -- If none of the keys are dirty, we can return the results immediately.
 -- Otherwise, a blocking computation is returned *which must be evaluated asynchronously* to avoid deadlock.
-builder :: (Traversable f) => Database -> Stack -> f Key -> IO (f (Key, Result))
+builder :: Database -> Stack -> [Key] -> IO [(Key, Result)]
 -- builder _ st kk | traceShow ("builder", st,kk) False = undefined
 builder db stack keys = for keys $ \k -> builderOne db stack k
diff --git a/plugins/hls-signature-help-plugin/test/Main.hs b/plugins/hls-signature-help-plugin/test/Main.hs
index f6518552ae..f22fbaf400 100644
--- a/plugins/hls-signature-help-plugin/test/Main.hs
+++ b/plugins/hls-signature-help-plugin/test/Main.hs
@@ -5,7 +5,6 @@
 import Control.Arrow ((>>>))
 import Control.Exception (throw)
 import Control.Lens ((^.))
 import Data.Maybe (fromJust)
-import Data.String.Interpolate (__i)
 import Data.Text (Text)
 import qualified Data.Text as T
 import Development.IDE.Plugin.Completions.Types (PosPrefixInfo (PosPrefixInfo))

From 8f3737973f58a6313ae9690922a84d6a6a9ee98e Mon Sep 17 00:00:00 2001
From: soulomoon
Date: Sun, 7 Sep 2025 14:42:32 +0800
Subject: [PATCH 85/87] prefer shakeRestart if there are others in queue

---
 ghcide/src/Development/IDE/Core/Shake.hs      |  2 +-
 hls-graph/src/Development/IDE/WorkerThread.hs | 14 +++++++++++++-
 2 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs
index 4552fc6457..e8f2022f27 100644
--- a/ghcide/src/Development/IDE/Core/Shake.hs
+++ b/ghcide/src/Development/IDE/Core/Shake.hs
@@ -854,7 +854,7 @@ delayedAction a = do
 -- but actions added via 'shakeEnqueue' will be requeued.
 shakeRestart :: Recorder (WithPriority Log) -> IdeState -> VFSModified -> String -> [DelayedAction ()] -> IO [Key] -> IO ()
 shakeRestart recorder IdeState{..} vfs reason acts ioActionBetweenShakeSession =
-    void $ awaitRunInThread (restartQueue shakeExtras) $ do
+    void $ awaitRunInThreadAtHead (restartQueue shakeExtras) $ do
         withMVar'
             shakeSession
             (\runner -> do
diff --git a/hls-graph/src/Development/IDE/WorkerThread.hs b/hls-graph/src/Development/IDE/WorkerThread.hs
index 1b527e089d..fd6a5c7695 100644
--- a/hls-graph/src/Development/IDE/WorkerThread.hs
+++ b/hls-graph/src/Development/IDE/WorkerThread.hs
@@ -19,7 +19,8 @@ module Development.IDE.WorkerThread
     withWorkerQueueSimple,
     runInThreadStmInNewThreads,
     isEmptyTaskQueue,
-    counTaskQueue
+    counTaskQueue,
+    awaitRunInThreadAtHead
   ) where

 import Control.Concurrent.Async (Async, async, withAsync)
@@ -147,6 +148,17 @@ awaitRunInThread (TaskQueue q) act = do
     Left e -> throw (e :: SomeException)
     Right r -> return r

+awaitRunInThreadAtHead :: TaskQueue (IO ()) -> IO result -> IO result
+awaitRunInThreadAtHead (TaskQueue q) act = do
+  barrier <- newEmptyTMVarIO
+  -- Push the action to the front of the TQueue and
+  -- use the barrier to wait for the result
+  atomically $ unGetTQueue q (try act >>= atomically . putTMVar barrier)
+  resultOrException <- atomically $ takeTMVar barrier
+  case resultOrException of
+    Left e -> throw (e :: SomeException)
+    Right r -> return r
+
 writeTaskQueue :: TaskQueue a -> a -> STM ()
 writeTaskQueue (TaskQueue q) = writeTQueue q

From 1ec7d63296d3d3f0fdcafad529e34a778f0a3987 Mon Sep 17 00:00:00 2001
From: soulomoon
Date: Sun, 7 Sep 2025 15:16:26 +0800
Subject: [PATCH 86/87] do not spawn if just one element

---
 .../IDE/Graph/Internal/Database.hs | 34 +++++++++++++-------
 1 file changed, 23 insertions(+), 11 deletions(-)

diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs
index bae81ece30..b0940f19ac 100644
--- a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs
+++ b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs
@@ -109,32 +109,44 @@
 -- Otherwise, a blocking computation is returned *which must be evaluated asynchronously* to avoid deadlock.
 builder :: Database -> Stack -> [Key] -> IO [(Key, Result)]
 -- builder _ st kk | traceShow ("builder", st,kk) False = undefined
-builder db stack keys = for keys $ \k -> builderOne db stack k
+builder db stack [k] = return <$> builderOne IsSingleton db stack k
+builder db stack keys = for keys $ \k -> builderOne NotSingleton db stack k

+data IsSingletonTask = IsSingleton | NotSingleton
+data BuildContinue = BCContinue | BCStop Result | BCRead (IO Result)

-builderOne :: Database -> Stack -> Key -> IO (Key, Result)
-builderOne db@Database {..} stack id = do
+builderOne :: IsSingletonTask -> Database -> Stack -> Key -> IO (Key, Result)
+builderOne isSingletonTask db@Database {..} stack id = do
   traceEvent ("builderOne: " ++ show id) return ()
   res <- liftIO $ atomicallyNamed "builder" $ do
     -- Spawn the id if needed
     status <- SMap.lookup id databaseValues
     current <- readTVar databaseStep
-    val <- case viewDirty current $ maybe (Dirty Nothing) keyStatus status of
+    case viewDirty current $ maybe (Dirty Nothing) keyStatus status of
       Dirty s -> do
        SMap.focus (updateStatus $ Running current s) id databaseValues
-        traceEvent ("Starting build of key: " ++ show id ++ ", step "++ show current)
-          $ runOneInDataBase (show id) db (refresh db stack id s) $ \e -> atomically $ SMap.focus (updateStatus $ Exception current e s) id databaseValues
-        return Nothing
-      Clean r -> return $ Just r
+        case isSingletonTask of
+          IsSingleton ->
+            return $
+              BCRead $
+                refresh db stack id s `catch` \e@(SomeException _) -> do
+                  atomically $ SMap.focus (updateStatus $ Exception current e s) id databaseValues
+                  throw e
+          NotSingleton -> do
+            traceEvent ("Starting build of key: " ++ show id ++ ", step " ++ show current) $
+              runOneInDataBase (show id) db (refresh db stack id s) $
+                \e -> atomically $ SMap.focus (updateStatus $ Exception current e s) id databaseValues
+            return BCContinue
+      Clean r -> return $ BCStop r
       -- force here might contains async exceptions from previous runs
       Running _step _s
         | memberStack id stack -> throw $ StackException stack
        | otherwise -> retry
       Exception _ e _s -> throw e
-    pure val
   case res of
-    Just r -> return (id, r)
-    Nothing -> builderOne db stack id
+    BCStop r -> return (id, r)
+    BCContinue -> builderOne isSingletonTask db stack id
+    BCRead ioR -> (id,) <$> ioR

 -- | isDirty
 -- only dirty when it's build time is older than the changed time of one of its dependencies

From df2631c8db2e6defc66f7af037d2a3e5b2b916c7 Mon Sep 17 00:00:00 2001
From: soulomoon
Date: Sun, 7 Sep 2025 15:28:37 +0800
Subject: [PATCH 87/87] guard with mask so it must run

---
 hls-graph/src/Development/IDE/Graph/Internal/Database.hs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs
index b0940f19ac..315d1615c9 100644
--- a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs
+++ b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs
@@ -115,7 +115,7 @@ data IsSingletonTask = IsSingleton | NotSingleton
 data BuildContinue = BCContinue | BCStop Result | BCRead (IO Result)

 builderOne :: IsSingletonTask -> Database -> Stack -> Key -> IO (Key, Result)
-builderOne isSingletonTask db@Database {..} stack id = do
+builderOne isSingletonTask db@Database {..} stack id = mask $ \restore -> do
   traceEvent ("builderOne: " ++ show id) return ()
   res <- liftIO $ atomicallyNamed "builder" $ do
     -- Spawn the id if needed
@@ -129,7 +129,7 @@ builderOne isSingletonTask db@Database {..} stack id = do
          IsSingleton ->
            return $
              BCRead $
-                refresh db stack id s `catch` \e@(SomeException _) -> do
+                restore (refresh db stack id s) `catch` \e@(SomeException _) -> do
                  atomically $ SMap.focus (updateStatus $ Exception current e s) id databaseValues
                  throw e
          NotSingleton -> do
@@ -145,7 +145,7 @@ builderOne isSingletonTask db@Database {..} stack id = do
   case res of
     BCStop r -> return (id, r)
-    BCContinue -> builderOne isSingletonTask db stack id
+    BCContinue -> restore $ builderOne isSingletonTask db stack id
     BCRead ioR -> (id,) <$> ioR

 -- | isDirty