@@ -15,10 +15,16 @@ import Foreign.Marshal.Array
 import Control.Monad (forM)
 import Control.Exception (catch)
-import System.Directory -- For locating links.xml
+import System.Directory -- For locating links.xml, suggestions.gmni
 import System.FilePath
 import System.IO (hPrint, stderr) -- For error reporting
+import Data.Trie.Text (Trie)
+import qualified Data.Trie.Text as Trie
+import Data.List (nub)
+import Control.Concurrent (forkIO) -- For updating suggestions off the main thread
+import Control.Concurrent.MVar (readMVar)
+
 data Link = Link {
     label :: Text,
     title :: Text,
@@ -111,11 +117,36 @@ readBookmarks = do
     handleInvalid err@(InvalidXMLFile _ _) = hPrint stderr err >> nodoc
     nodoc = return $ Document (Prologue [] Nothing []) (Element "empty" M.empty []) []
+-- | Write out a file of the most frequently encountered unvisited links.
+-- Hopefully this'll help surfers rely less on YouTube et al.'s heuristics.
+updateSuggestions :: Page -> IO ()
+updateSuggestions page = do
+    hist <- readMVar $ visitedURLs page
+    let links = extractLinks $ html page
+    let domain = maybe "" show $ uriAuthority $ url page
+
+    dir <- getXdgDirectory XdgData "rhapsode"
+    let path = dir </> "suggestions.gmni"
+    exists <- doesFileExist path
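+    -- Re-read any previous suggestions, dropping links visited since.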
+    suggestions <- if not exists then return [] else do
+        file <- Prelude.readFile path
+        return [line' | line <- lines file, line'@(_:uri':_) <- [words line], not (pack uri' `Trie.member` hist)]
+
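+    -- Append this page's unvisited links as gemtext "=> URI label" lines; nub
+    -- only de-duplicates within a page, so repeat encounters accumulate across pages.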
+    let suggestions' = suggestions ++ nub [["=>", uri', domain] | link <- links,
+            let uri' = uriToString id (href link) "", not (pack uri' `Trie.member` hist)]
+
+    createDirectoryIfMissing True dir
+    Prelude.writeFile path $ unlines $ map unwords suggestions'
+
 -- C API
 foreign export ccall c_extractLinks :: StablePtr Page -> IO (CArray CString)
 c_extractLinks c_page = do
     page <- deRefStablePtr c_page
+    forkIO $ updateSuggestions page -- Background thread, so this navigation aid doesn't block the C caller.
     bookmarks <- readBookmarks
     ret <- forM (linksFromPage page ++ extractLinks bookmarks) $ \link -> do
         c_label <- text2cstring $ strip $ label link