! Copyright (C) 2023 CapitalEx.
! See https://factorcode.org/license.txt for BSD license.
USING: accessors arrays assocs combinators
combinators.short-circuit combinators.smart command-line
concurrency.combinators continuations generalizations hashtables
html.elements io io.directories io.encodings.utf8 io.files
io.files.info io.pathnames io.streams.string kernel linked-sets
namespaces prettyprint sbufs sequences sequences.deep sets
sorting splitting strings unicode xml xml.data xml.traversal
xml.writer ;
FROM: namespaces => set ;
IN: snarl

! NOTE(review): this file appears to have been run through an
! HTML tag stripper: every token of the form <...> (the <PRIVATE
! marker, the names of the helper words defined just below, and
! the markup inside their string literals) has been deleted, so
! the three definitions below are missing their word names and
! parts of their strings. They cannot be reconstructed from what
! is left; restore them from the original source before loading.
( -- ) "
" write-html ; :
( -- ) ">" write-html ; :
"
" write-html ;

! NOTE(review): the words set-of and the @pages symbol used below
! have no visible definition — they were presumably lost to the
! same HTML tag stripper that mangled the helpers above.

! Substitute a value for the {{title}} placeholder.
: write-title ( template title -- string ) "{{title}}" swap replace ;

! Substitute a value for the {{headings}} placeholder.
: write-headings ( template headings -- string )
    "{{headings}}" swap replace ;

! Substitute a value for the {{backlinks}} placeholder.
: write-backlinks ( template backlinks -- string )
    "{{backlinks}}" swap replace ;

! Substitute a value for the {{article}} placeholder.
: write-article ( template articles -- string )
    "{{article}}" swap replace ;

! Load the page template from DIR/_templates/page.html.
: read-template-file ( wiki-path -- sbuf )
    "_templates/page.html" append-path utf8 file-contents >sbuf ;

! Parse each file into a { file-name page } pair and build a
! hashtable of pages, where a page records "body" (parsed XML),
! "file-name" and an (initially empty) "backlinks" set.
! NOTE(review): a constructor token (likely a linked-sets <...>
! word) appears to have been stripped around "0 clone" below.
: parse-xml ( dirs -- hashtable )
    [
        dup file-name dup rot file>xml H{ } clone
        [ "body" swap set-at ]
        [ "file-name" swap set-at ]
        [ "backlinks" 0 clone set-of ] tri 2array
    ] map >hashtable ;

! Store the text of the body's first h1 tag under "title".
: add-title ( page -- page )
    dup "body" of "h1" deep-tag-named children>string
    "title" swap set-of ;

: add-titles ( hashtable -- hashtable ) [ add-title ] assoc-map ;

! Store all outbound link hrefs (except sitemap.html) under "links".
: add-links ( page -- page )
    dup "body" of "a" deep-tags-named [ "href" attr ] map!
    [ "sitemap.html" = ] reject "links" swap set-of ;

: add-all-links ( hashtable -- hashtable ) [ add-links ] assoc-map ;

! Collect every h1-h6 tag in the body, grouped by level, and
! store the flattened result under "headings".
: add-headings ( page -- page )
    dup "body" of {
        [ "h1" deep-tags-named ]
        [ "h2" deep-tags-named ]
        [ "h3" deep-tags-named ]
        [ "h4" deep-tags-named ]
        [ "h5" deep-tags-named ]
        [ "h6" deep-tags-named ]
    } cleave>array flatten1 "headings" swap set-of ;

! NOTE(review): "hahstable" typo in the effect below is preserved
! verbatim (stack-effect names are documentation only).
: add-all-headings ( hahstable -- hashtable )
    [ add-headings ] assoc-map ;

! For each outbound link, look the target page up in pages and
! adjoin this file into that page's "backlinks" set.
: backlinks ( file pages links -- )
    [ of "backlinks" of adjoin ] 2with each ;

! Populate every page's "backlinks" set from every page's "links".
: build-backlinks ( pages -- pages )
    3 dupn [ "links" of backlinks ] with assoc-each ;

! Parse and annotate every page file under path, stashing the
! resulting pages table in the @pages variable as well.
: process-pages ( path -- pages )
    qualified-directory-files parse-xml add-titles add-all-links
    add-all-headings build-backlinks dup @pages set ;

! Look a page up by key in @pages and write its title.
: render-backlink ( page -- ) @pages get at "title" of write ;

: (render-backlinks) ( links -- )
    ! NOTE(review): the html.elements markup words that wrapped
    ! this body (heading tags, and presumably list markup that
    ! iterated the links via render-backlink) were deleted by an
    ! HTML tag stripper; only the heading text survives. Restore
    ! from the original source.
    "In Bound Links" write
;

! Render the members of a page's "backlinks" set into a string.
! NOTE(review): effect says sbuf but with-string-writer yields a
! string.
: render-backlinks ( page -- sbuf )
    "backlinks" of members [ (render-backlinks) ] with-string-writer ;

! Write the text content of a single heading tag.
: render-header ( header -- ) children>string write ;

: (render-headers) ( headers -- )
    ! NOTE(review): the html.elements markup words that wrapped
    ! this body (and presumably the code that rendered each
    ! heading via render-header) were deleted by an HTML tag
    ! stripper; only the heading text survives. Restore from the
    ! original source.
    "Headers" write
;

! Render a page's collected "headings" into a string.
! NOTE(review): effect says sbuf but with-string-writer yields a
! string.
: render-headers ( page -- sbuf )
    "headings" of [ (render-headers) ] with-string-writer ;

! Take the page's parsed XML body, tag it with id "main-article",
! and serialize it back to an XML string.
: render-body ( page -- string )
    "body" of body>> dup "main-article" "id" set-attr xml>string ;

! Fill the template's placeholders with the page's title,
! headings, backlinks and article body.
: render-page ( template page -- sbuf )
    { [ "title" of write-title ] [ render-headers write-headings ]
      [ render-backlinks write-backlinks ]
      [ render-body write-article ] } cleave ;

! Render every page against the template.
: render-pages ( template pages -- rendered-pages )
    [ render-page >string ] with assoc-map ;

! From the single command-line argument (the wiki directory),
! produce the public output path, the page template, and the
! processed pages (from DIR/pages).
: process-command-line ( -- public-path template pages )
    command-line get first [ "public" append-path ]
    [ read-template-file ] [ "pages" append-path process-pages ] tri ;

! Prefix every page's file name with the output directory.
: map-output-files ( out pages -- pages )
    [ [ prepend-path ] dip ] with assoc-map ;

! Write each rendered page to its output path as UTF-8.
: write-pages ( pages -- )
    [ values ] [ keys ] bi [ utf8 set-file-contents ] 2each ;

! Split a title on blanks and drop the articles the/a/an
! (case-insensitively).
: strip-articles ( words -- words )
    [ blank? ] split-when
    [ >lower { [ "the" = ] [ "a" = ] [ "an" = ] } 1|| ] reject ;

! Group all pages by the upper-cased first letter of their title
! (ignoring leading articles), sorted by letter.
! NOTE(review): "alphbetically" is a typo, preserved because the
! word is called below under that spelling.
: group-pages-alphbetically ( -- pages )
    @pages get values
    [ "title" of strip-articles first 1 head >upper ] collect-by
    sort-keys ;

: render-sitemap ( -- string ) group-pages-alphbetically [
        ! NOTE(review): the html.elements markup words that
        ! wrapped this sitemap body (heading and per-letter group
        ! tags) were deleted by an HTML tag stripper; only the
        ! plain writes and the assoc-each skeleton survive.
        ! Restore from the original source.
        "Site Map" write
        [
            swap write
        ] assoc-each
] with-string-writer ;

! Render the site map through the page template (with empty
! headings/backlinks sections) and write it to
! DIR/public/sitemap.html.
: make-sitemap ( -- )
    command-line get first
    [ "public/sitemap.html" append-path ] [ read-template-file ] bi
    "Site Map" write-title
    "" write-headings
    "" write-backlinks
    render-sitemap write-article
    swap utf8 set-file-contents ;

PRIVATE>

! Entry point: read the wiki directory from the command line,
! render every page under DIR/pages into DIR/public, then emit
! the site map.
: build-wiki ( -- )
    process-command-line render-pages map-output-files write-pages
    make-sitemap ;

! Require exactly one argument; report (rather than propagate)
! any build failure.
MAIN: [
    command-line get length 1 = [
        [ build-wiki ] [ drop "Failed to build wiki" print ] recover
    ] [ "Usage: snarl DIR" print ] if
]