Rewrite markdown rendering to blackfriday v2 and rewrite orgmode rendering to go-org (#8560)
* Rewrite markdown rendering to blackfriday v2.0 * Fix style * Fix go mod with golang 1.13 * Fix blackfriday v2 import * Inital orgmode renderer migration to go-org * Vendor go-org dependency * Ignore errors :/ * Update go-org to latest version * Update test * Fix go-org test * Remove unneeded code * Fix comments * Fix markdown test * Fix blackfriday regression rendering HTML block
This commit is contained in:
9
go.mod
9
go.mod
@ -22,7 +22,6 @@ require (
|
|||||||
github.com/blevesearch/go-porterstemmer v0.0.0-20141230013033-23a2c8e5cf1f // indirect
|
github.com/blevesearch/go-porterstemmer v0.0.0-20141230013033-23a2c8e5cf1f // indirect
|
||||||
github.com/blevesearch/segment v0.0.0-20160105220820-db70c57796cc // indirect
|
github.com/blevesearch/segment v0.0.0-20160105220820-db70c57796cc // indirect
|
||||||
github.com/boombuler/barcode v0.0.0-20161226211916-fe0f26ff6d26 // indirect
|
github.com/boombuler/barcode v0.0.0-20161226211916-fe0f26ff6d26 // indirect
|
||||||
github.com/chaseadamsio/goorgeous v0.0.0-20170901132237-098da33fde5f
|
|
||||||
github.com/couchbase/vellum v0.0.0-20190111184608-e91b68ff3efe // indirect
|
github.com/couchbase/vellum v0.0.0-20190111184608-e91b68ff3efe // indirect
|
||||||
github.com/cznic/b v0.0.0-20181122101859-a26611c4d92d // indirect
|
github.com/cznic/b v0.0.0-20181122101859-a26611c4d92d // indirect
|
||||||
github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 // indirect
|
github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 // indirect
|
||||||
@ -73,6 +72,7 @@ require (
|
|||||||
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae // indirect
|
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae // indirect
|
||||||
github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc
|
github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc
|
||||||
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5
|
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5
|
||||||
|
github.com/niklasfasching/go-org v0.1.7
|
||||||
github.com/oliamb/cutter v0.2.2
|
github.com/oliamb/cutter v0.2.2
|
||||||
github.com/philhofer/fwd v1.0.0 // indirect
|
github.com/philhofer/fwd v1.0.0 // indirect
|
||||||
github.com/pkg/errors v0.8.1
|
github.com/pkg/errors v0.8.1
|
||||||
@ -80,12 +80,13 @@ require (
|
|||||||
github.com/prometheus/client_golang v1.1.0
|
github.com/prometheus/client_golang v1.1.0
|
||||||
github.com/prometheus/procfs v0.0.4 // indirect
|
github.com/prometheus/procfs v0.0.4 // indirect
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 // indirect
|
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 // indirect
|
||||||
github.com/russross/blackfriday v0.0.0-20180428102519-11635eb403ff
|
github.com/russross/blackfriday v2.0.0+incompatible // indirect
|
||||||
|
github.com/russross/blackfriday/v2 v2.0.1
|
||||||
github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca // indirect
|
github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca // indirect
|
||||||
github.com/satori/go.uuid v1.2.0
|
github.com/satori/go.uuid v1.2.0
|
||||||
github.com/sergi/go-diff v1.0.0
|
github.com/sergi/go-diff v1.0.0
|
||||||
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b // indirect
|
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b // indirect
|
||||||
github.com/shurcooL/sanitized_anchor_name v0.0.0-20160918041101-1dba4b3954bc // indirect
|
github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect
|
||||||
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd
|
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd
|
||||||
github.com/steveyen/gtreap v0.0.0-20150807155958-0abe01ef9be2 // indirect
|
github.com/steveyen/gtreap v0.0.0-20150807155958-0abe01ef9be2 // indirect
|
||||||
github.com/stretchr/testify v1.4.0
|
github.com/stretchr/testify v1.4.0
|
||||||
@ -100,7 +101,7 @@ require (
|
|||||||
github.com/willf/bitset v0.0.0-20180426185212-8ce1146b8621 // indirect
|
github.com/willf/bitset v0.0.0-20180426185212-8ce1146b8621 // indirect
|
||||||
github.com/yohcop/openid-go v0.0.0-20160914080427-2c050d2dae53
|
github.com/yohcop/openid-go v0.0.0-20160914080427-2c050d2dae53
|
||||||
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad
|
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad
|
||||||
golang.org/x/net v0.0.0-20190909003024-a7b16738d86b
|
golang.org/x/net v0.0.0-20191028085509-fe3aa8a45271
|
||||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
|
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
|
||||||
golang.org/x/sys v0.0.0-20190910064555-bbd175535a8b
|
golang.org/x/sys v0.0.0-20190910064555-bbd175535a8b
|
||||||
golang.org/x/text v0.3.2
|
golang.org/x/text v0.3.2
|
||||||
|
16
go.sum
16
go.sum
@ -86,8 +86,6 @@ github.com/boombuler/barcode v0.0.0-20161226211916-fe0f26ff6d26/go.mod h1:paBWMc
|
|||||||
github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668 h1:U/lr3Dgy4WK+hNk4tyD+nuGjpVLPEHuJSFXMw11/HPA=
|
github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668 h1:U/lr3Dgy4WK+hNk4tyD+nuGjpVLPEHuJSFXMw11/HPA=
|
||||||
github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
|
github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
|
||||||
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
||||||
github.com/chaseadamsio/goorgeous v0.0.0-20170901132237-098da33fde5f h1:REH9VH5ubNR0skLaOxK7TRJeRbE2dDfvaouQo8FsRcA=
|
|
||||||
github.com/chaseadamsio/goorgeous v0.0.0-20170901132237-098da33fde5f/go.mod h1:6QaC0vFoKWYDth94dHFNgRT2YkT5FHdQp/Yx15aAAi0=
|
|
||||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||||
github.com/corbym/gocrest v1.0.3 h1:gwEdq6RkTmq+09CTuM29DfKOCtZ7G7bcyxs3IZ6EVdU=
|
github.com/corbym/gocrest v1.0.3 h1:gwEdq6RkTmq+09CTuM29DfKOCtZ7G7bcyxs3IZ6EVdU=
|
||||||
github.com/corbym/gocrest v1.0.3/go.mod h1:maVFL5lbdS2PgfOQgGRWDYTeunSWQeiEgoNdTABShCs=
|
github.com/corbym/gocrest v1.0.3/go.mod h1:maVFL5lbdS2PgfOQgGRWDYTeunSWQeiEgoNdTABShCs=
|
||||||
@ -425,6 +423,10 @@ github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRW
|
|||||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||||
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5 h1:BvoENQQU+fZ9uukda/RzCAL/191HHwJA5b13R6diVlY=
|
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5 h1:BvoENQQU+fZ9uukda/RzCAL/191HHwJA5b13R6diVlY=
|
||||||
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
|
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
|
||||||
|
github.com/niklasfasching/go-org v0.1.6 h1:F521WcqRNl8OJumlgAnekZgERaTA2HpfOYYfVEKOeI8=
|
||||||
|
github.com/niklasfasching/go-org v0.1.6/go.mod h1:AsLD6X7djzRIz4/RFZu8vwRL0VGjUvGZCCH1Nz0VdrU=
|
||||||
|
github.com/niklasfasching/go-org v0.1.7 h1:t3V+3XnS/7BhKv/7SlMUa8FvAiq577/a1T3D7mLIRXE=
|
||||||
|
github.com/niklasfasching/go-org v0.1.7/go.mod h1:AsLD6X7djzRIz4/RFZu8vwRL0VGjUvGZCCH1Nz0VdrU=
|
||||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||||
github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k=
|
github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k=
|
||||||
github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU=
|
github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU=
|
||||||
@ -487,8 +489,10 @@ github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001/go.mod h1:qq
|
|||||||
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
||||||
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
|
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
|
||||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||||
github.com/russross/blackfriday v0.0.0-20180428102519-11635eb403ff h1:g9ZlAHmkc/h5So+OjNCkZWh+FjuKEOOOoyRkqlGA8+c=
|
github.com/russross/blackfriday v2.0.0+incompatible h1:cBXrhZNUf9C+La9/YpS+UHpUT8YD6Td9ZMSU9APFcsk=
|
||||||
github.com/russross/blackfriday v0.0.0-20180428102519-11635eb403ff/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
github.com/russross/blackfriday v2.0.0+incompatible/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||||
|
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
|
||||||
|
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca h1:NugYot0LIVPxTvN8n+Kvkn6TrbMyxQiuvKdEwFdR9vI=
|
github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca h1:NugYot0LIVPxTvN8n+Kvkn6TrbMyxQiuvKdEwFdR9vI=
|
||||||
github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
|
github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
|
||||||
github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=
|
github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=
|
||||||
@ -499,6 +503,8 @@ github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b h1:4kg1wyftSKxLtnP
|
|||||||
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
|
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
|
||||||
github.com/shurcooL/sanitized_anchor_name v0.0.0-20160918041101-1dba4b3954bc h1:3wIrJvFb3Pf6B/2mDBnN1G5IfUVev4X5apadQlWOczE=
|
github.com/shurcooL/sanitized_anchor_name v0.0.0-20160918041101-1dba4b3954bc h1:3wIrJvFb3Pf6B/2mDBnN1G5IfUVev4X5apadQlWOczE=
|
||||||
github.com/shurcooL/sanitized_anchor_name v0.0.0-20160918041101-1dba4b3954bc/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
github.com/shurcooL/sanitized_anchor_name v0.0.0-20160918041101-1dba4b3954bc/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||||
|
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
|
||||||
|
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||||
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd h1:ug7PpSOB5RBPK1Kg6qskGBoP3Vnj/aNYFTznWvlkGo0=
|
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd h1:ug7PpSOB5RBPK1Kg6qskGBoP3Vnj/aNYFTznWvlkGo0=
|
||||||
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
|
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
|
||||||
github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726/go.mod h1:3yhqj7WBBfRhbBlzyOC3gUxftwsU0u8gqevxwIHQpMw=
|
github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726/go.mod h1:3yhqj7WBBfRhbBlzyOC3gUxftwsU0u8gqevxwIHQpMw=
|
||||||
@ -650,6 +656,8 @@ golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 h1:k7pJ2yAPLPgbskkFdhRCsA77k
|
|||||||
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20190909003024-a7b16738d86b h1:XfVGCX+0T4WOStkaOsJRllbsiImhB2jgVBGc9L0lPGc=
|
golang.org/x/net v0.0.0-20190909003024-a7b16738d86b h1:XfVGCX+0T4WOStkaOsJRllbsiImhB2jgVBGc9L0lPGc=
|
||||||
golang.org/x/net v0.0.0-20190909003024-a7b16738d86b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190909003024-a7b16738d86b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20191028085509-fe3aa8a45271 h1:N66aaryRB3Ax92gH0v3hp1QYZ3zWWCCUR/j8Ifh45Ss=
|
||||||
|
golang.org/x/net v0.0.0-20191028085509-fe3aa8a45271/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421 h1:Wo7BWFiOk0QRFMLYMqJGFMd9CgUAcGx7V+qEg/h5IBI=
|
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421 h1:Wo7BWFiOk0QRFMLYMqJGFMd9CgUAcGx7V+qEg/h5IBI=
|
||||||
|
@ -323,6 +323,6 @@ func TestRender_ShortLinks(t *testing.T) {
|
|||||||
`<p><a href="`+notencodedImgurlWiki+`" rel="nofollow"><img src="`+notencodedImgurlWiki+`"/></a></p>`)
|
`<p><a href="`+notencodedImgurlWiki+`" rel="nofollow"><img src="`+notencodedImgurlWiki+`"/></a></p>`)
|
||||||
test(
|
test(
|
||||||
"<p><a href=\"https://example.org\">[[foobar]]</a></p>",
|
"<p><a href=\"https://example.org\">[[foobar]]</a></p>",
|
||||||
`<p><a href="https://example.org" rel="nofollow">[[foobar]]</a></p>`,
|
`<p></p><p><a href="https://example.org" rel="nofollow">[[foobar]]</a></p><p></p>`,
|
||||||
`<p><a href="https://example.org" rel="nofollow">[[foobar]]</a></p>`)
|
`<p></p><p><a href="https://example.org" rel="nofollow">[[foobar]]</a></p><p></p>`)
|
||||||
}
|
}
|
||||||
|
@ -7,13 +7,14 @@ package markdown
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"io"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"code.gitea.io/gitea/modules/markup"
|
"code.gitea.io/gitea/modules/markup"
|
||||||
"code.gitea.io/gitea/modules/setting"
|
"code.gitea.io/gitea/modules/setting"
|
||||||
"code.gitea.io/gitea/modules/util"
|
"code.gitea.io/gitea/modules/util"
|
||||||
|
|
||||||
"github.com/russross/blackfriday"
|
"github.com/russross/blackfriday/v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Renderer is a extended version of underlying render object.
|
// Renderer is a extended version of underlying render object.
|
||||||
@ -25,134 +26,138 @@ type Renderer struct {
|
|||||||
|
|
||||||
var byteMailto = []byte("mailto:")
|
var byteMailto = []byte("mailto:")
|
||||||
|
|
||||||
// Link defines how formal links should be processed to produce corresponding HTML elements.
|
var htmlEscaper = [256][]byte{
|
||||||
func (r *Renderer) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) {
|
'&': []byte("&"),
|
||||||
// special case: this is not a link, a hash link or a mailto:, so it's a
|
'<': []byte("<"),
|
||||||
// relative URL
|
'>': []byte(">"),
|
||||||
if len(link) > 0 && !markup.IsLink(link) &&
|
'"': []byte("""),
|
||||||
link[0] != '#' && !bytes.HasPrefix(link, byteMailto) {
|
}
|
||||||
lnk := string(link)
|
|
||||||
|
func escapeHTML(w io.Writer, s []byte) {
|
||||||
|
var start, end int
|
||||||
|
for end < len(s) {
|
||||||
|
escSeq := htmlEscaper[s[end]]
|
||||||
|
if escSeq != nil {
|
||||||
|
_, _ = w.Write(s[start:end])
|
||||||
|
_, _ = w.Write(escSeq)
|
||||||
|
start = end + 1
|
||||||
|
}
|
||||||
|
end++
|
||||||
|
}
|
||||||
|
if start < len(s) && end <= len(s) {
|
||||||
|
_, _ = w.Write(s[start:end])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderNode is a default renderer of a single node of a syntax tree. For
|
||||||
|
// block nodes it will be called twice: first time with entering=true, second
|
||||||
|
// time with entering=false, so that it could know when it's working on an open
|
||||||
|
// tag and when on close. It writes the result to w.
|
||||||
|
//
|
||||||
|
// The return value is a way to tell the calling walker to adjust its walk
|
||||||
|
// pattern: e.g. it can terminate the traversal by returning Terminate. Or it
|
||||||
|
// can ask the walker to skip a subtree of this node by returning SkipChildren.
|
||||||
|
// The typical behavior is to return GoToNext, which asks for the usual
|
||||||
|
// traversal to the next node.
|
||||||
|
func (r *Renderer) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
|
||||||
|
switch node.Type {
|
||||||
|
case blackfriday.Image:
|
||||||
|
prefix := r.URLPrefix
|
||||||
if r.IsWiki {
|
if r.IsWiki {
|
||||||
lnk = util.URLJoin("wiki", lnk)
|
prefix = util.URLJoin(prefix, "wiki", "raw")
|
||||||
}
|
}
|
||||||
mLink := util.URLJoin(r.URLPrefix, lnk)
|
prefix = strings.Replace(prefix, "/src/", "/media/", 1)
|
||||||
link = []byte(mLink)
|
link := node.LinkData.Destination
|
||||||
}
|
if len(link) > 0 && !markup.IsLink(link) {
|
||||||
|
lnk := string(link)
|
||||||
if len(content) > 10 && string(content[0:9]) == "<a href=\"" && bytes.Contains(content[9:], []byte("<img")) {
|
lnk = util.URLJoin(prefix, lnk)
|
||||||
// Image with link case: markdown `[![]()]()`
|
lnk = strings.Replace(lnk, " ", "+", -1)
|
||||||
// If the content is an image, then we change the original href around it
|
link = []byte(lnk)
|
||||||
// which points to itself to a new address "link"
|
|
||||||
rightQuote := bytes.Index(content[9:], []byte("\""))
|
|
||||||
content = bytes.Replace(content, content[9:9+rightQuote], link, 1)
|
|
||||||
out.Write(content)
|
|
||||||
} else {
|
|
||||||
r.Renderer.Link(out, link, title, content)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// List renders markdown bullet or digit lists to HTML
|
|
||||||
func (r *Renderer) List(out *bytes.Buffer, text func() bool, flags int) {
|
|
||||||
marker := out.Len()
|
|
||||||
if out.Len() > 0 {
|
|
||||||
out.WriteByte('\n')
|
|
||||||
}
|
|
||||||
|
|
||||||
if flags&blackfriday.LIST_TYPE_DEFINITION != 0 {
|
|
||||||
out.WriteString("<dl>")
|
|
||||||
} else if flags&blackfriday.LIST_TYPE_ORDERED != 0 {
|
|
||||||
out.WriteString("<ol class='ui list'>")
|
|
||||||
} else {
|
|
||||||
out.WriteString("<ul class='ui list'>")
|
|
||||||
}
|
|
||||||
if !text() {
|
|
||||||
out.Truncate(marker)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if flags&blackfriday.LIST_TYPE_DEFINITION != 0 {
|
|
||||||
out.WriteString("</dl>\n")
|
|
||||||
} else if flags&blackfriday.LIST_TYPE_ORDERED != 0 {
|
|
||||||
out.WriteString("</ol>\n")
|
|
||||||
} else {
|
|
||||||
out.WriteString("</ul>\n")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ListItem defines how list items should be processed to produce corresponding HTML elements.
|
|
||||||
func (r *Renderer) ListItem(out *bytes.Buffer, text []byte, flags int) {
|
|
||||||
// Detect procedures to draw checkboxes.
|
|
||||||
prefix := ""
|
|
||||||
if bytes.HasPrefix(text, []byte("<p>")) {
|
|
||||||
prefix = "<p>"
|
|
||||||
}
|
|
||||||
switch {
|
|
||||||
case bytes.HasPrefix(text, []byte(prefix+"[ ] ")):
|
|
||||||
text = append([]byte(`<span class="ui fitted disabled checkbox"><input type="checkbox" disabled="disabled" /><label /></span>`), text[3+len(prefix):]...)
|
|
||||||
if prefix != "" {
|
|
||||||
text = bytes.Replace(text, []byte(prefix), []byte{}, 1)
|
|
||||||
}
|
}
|
||||||
case bytes.HasPrefix(text, []byte(prefix+"[x] ")):
|
node.LinkData.Destination = link
|
||||||
text = append([]byte(`<span class="ui checked fitted disabled checkbox"><input type="checkbox" checked="" disabled="disabled" /><label /></span>`), text[3+len(prefix):]...)
|
// Render link around image only if parent is not link already
|
||||||
if prefix != "" {
|
if node.Parent != nil && node.Parent.Type != blackfriday.Link {
|
||||||
text = bytes.Replace(text, []byte(prefix), []byte{}, 1)
|
if entering {
|
||||||
|
_, _ = w.Write([]byte(`<a href="`))
|
||||||
|
escapeHTML(w, link)
|
||||||
|
_, _ = w.Write([]byte(`">`))
|
||||||
|
return r.Renderer.RenderNode(w, node, entering)
|
||||||
|
}
|
||||||
|
s := r.Renderer.RenderNode(w, node, entering)
|
||||||
|
_, _ = w.Write([]byte(`</a>`))
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
return r.Renderer.RenderNode(w, node, entering)
|
||||||
|
case blackfriday.Link:
|
||||||
|
// special case: this is not a link, a hash link or a mailto:, so it's a
|
||||||
|
// relative URL
|
||||||
|
link := node.LinkData.Destination
|
||||||
|
if len(link) > 0 && !markup.IsLink(link) &&
|
||||||
|
link[0] != '#' && !bytes.HasPrefix(link, byteMailto) &&
|
||||||
|
node.LinkData.Footnote == nil {
|
||||||
|
lnk := string(link)
|
||||||
|
if r.IsWiki {
|
||||||
|
lnk = util.URLJoin("wiki", lnk)
|
||||||
|
}
|
||||||
|
link = []byte(util.URLJoin(r.URLPrefix, lnk))
|
||||||
|
}
|
||||||
|
node.LinkData.Destination = link
|
||||||
|
return r.Renderer.RenderNode(w, node, entering)
|
||||||
|
case blackfriday.Text:
|
||||||
|
isListItem := false
|
||||||
|
for n := node.Parent; n != nil; n = n.Parent {
|
||||||
|
if n.Type == blackfriday.Item {
|
||||||
|
isListItem = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if isListItem {
|
||||||
|
text := node.Literal
|
||||||
|
switch {
|
||||||
|
case bytes.HasPrefix(text, []byte("[ ] ")):
|
||||||
|
_, _ = w.Write([]byte(`<span class="ui fitted disabled checkbox"><input type="checkbox" disabled="disabled" /><label /></span>`))
|
||||||
|
text = text[3:]
|
||||||
|
case bytes.HasPrefix(text, []byte("[x] ")):
|
||||||
|
_, _ = w.Write([]byte(`<span class="ui checked fitted disabled checkbox"><input type="checkbox" checked="" disabled="disabled" /><label /></span>`))
|
||||||
|
text = text[3:]
|
||||||
|
}
|
||||||
|
node.Literal = text
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
r.Renderer.ListItem(out, text, flags)
|
return r.Renderer.RenderNode(w, node, entering)
|
||||||
}
|
|
||||||
|
|
||||||
// Image defines how images should be processed to produce corresponding HTML elements.
|
|
||||||
func (r *Renderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {
|
|
||||||
prefix := r.URLPrefix
|
|
||||||
if r.IsWiki {
|
|
||||||
prefix = util.URLJoin(prefix, "wiki", "raw")
|
|
||||||
}
|
|
||||||
prefix = strings.Replace(prefix, "/src/", "/media/", 1)
|
|
||||||
if len(link) > 0 && !markup.IsLink(link) {
|
|
||||||
lnk := string(link)
|
|
||||||
lnk = util.URLJoin(prefix, lnk)
|
|
||||||
lnk = strings.Replace(lnk, " ", "+", -1)
|
|
||||||
link = []byte(lnk)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Put a link around it pointing to itself by default
|
|
||||||
out.WriteString(`<a href="`)
|
|
||||||
out.Write(link)
|
|
||||||
out.WriteString(`">`)
|
|
||||||
r.Renderer.Image(out, link, title, alt)
|
|
||||||
out.WriteString("</a>")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const (
|
const (
|
||||||
blackfridayExtensions = 0 |
|
blackfridayExtensions = 0 |
|
||||||
blackfriday.EXTENSION_NO_INTRA_EMPHASIS |
|
blackfriday.NoIntraEmphasis |
|
||||||
blackfriday.EXTENSION_TABLES |
|
blackfriday.Tables |
|
||||||
blackfriday.EXTENSION_FENCED_CODE |
|
blackfriday.FencedCode |
|
||||||
blackfriday.EXTENSION_STRIKETHROUGH |
|
blackfriday.Strikethrough |
|
||||||
blackfriday.EXTENSION_NO_EMPTY_LINE_BEFORE_BLOCK |
|
blackfriday.NoEmptyLineBeforeBlock |
|
||||||
blackfriday.EXTENSION_DEFINITION_LISTS |
|
blackfriday.DefinitionLists |
|
||||||
blackfriday.EXTENSION_FOOTNOTES |
|
blackfriday.Footnotes |
|
||||||
blackfriday.EXTENSION_HEADER_IDS |
|
blackfriday.HeadingIDs |
|
||||||
blackfriday.EXTENSION_AUTO_HEADER_IDS
|
blackfriday.AutoHeadingIDs
|
||||||
blackfridayHTMLFlags = 0 |
|
blackfridayHTMLFlags = 0 |
|
||||||
blackfriday.HTML_SKIP_STYLE |
|
blackfriday.Smartypants
|
||||||
blackfriday.HTML_OMIT_CONTENTS |
|
|
||||||
blackfriday.HTML_USE_SMARTYPANTS
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// RenderRaw renders Markdown to HTML without handling special links.
|
// RenderRaw renders Markdown to HTML without handling special links.
|
||||||
func RenderRaw(body []byte, urlPrefix string, wikiMarkdown bool) []byte {
|
func RenderRaw(body []byte, urlPrefix string, wikiMarkdown bool) []byte {
|
||||||
renderer := &Renderer{
|
renderer := &Renderer{
|
||||||
Renderer: blackfriday.HtmlRenderer(blackfridayHTMLFlags, "", ""),
|
Renderer: blackfriday.NewHTMLRenderer(blackfriday.HTMLRendererParameters{
|
||||||
|
Flags: blackfridayHTMLFlags,
|
||||||
|
}),
|
||||||
URLPrefix: urlPrefix,
|
URLPrefix: urlPrefix,
|
||||||
IsWiki: wikiMarkdown,
|
IsWiki: wikiMarkdown,
|
||||||
}
|
}
|
||||||
|
|
||||||
exts := blackfridayExtensions
|
exts := blackfridayExtensions
|
||||||
if setting.Markdown.EnableHardLineBreak {
|
if setting.Markdown.EnableHardLineBreak {
|
||||||
exts |= blackfriday.EXTENSION_HARD_LINE_BREAK
|
exts |= blackfriday.HardLineBreak
|
||||||
}
|
}
|
||||||
|
|
||||||
body = blackfriday.Markdown(body, renderer, exts)
|
body = blackfriday.Run(body, blackfriday.WithRenderer(renderer), blackfriday.WithExtensions(exts))
|
||||||
return markup.SanitizeBytes(body)
|
return markup.SanitizeBytes(body)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -166,13 +166,13 @@ func testAnswers(baseURLContent, baseURLImages string) []string {
|
|||||||
<h3 id="footnotes">Footnotes</h3>
|
<h3 id="footnotes">Footnotes</h3>
|
||||||
|
|
||||||
<p>Here is a simple footnote,<sup id="fnref:1"><a href="#fn:1" rel="nofollow">1</a></sup> and here is a longer one.<sup id="fnref:bignote"><a href="#fn:bignote" rel="nofollow">2</a></sup></p>
|
<p>Here is a simple footnote,<sup id="fnref:1"><a href="#fn:1" rel="nofollow">1</a></sup> and here is a longer one.<sup id="fnref:bignote"><a href="#fn:bignote" rel="nofollow">2</a></sup></p>
|
||||||
|
|
||||||
<div>
|
<div>
|
||||||
|
|
||||||
<hr/>
|
<hr/>
|
||||||
|
|
||||||
<ol>
|
<ol>
|
||||||
<li id="fn:1">This is the first footnote.
|
<li id="fn:1">This is the first footnote.</li>
|
||||||
</li>
|
|
||||||
|
|
||||||
<li id="fn:bignote"><p>Here is one with multiple paragraphs and code.</p>
|
<li id="fn:bignote"><p>Here is one with multiple paragraphs and code.</p>
|
||||||
|
|
||||||
@ -180,9 +180,9 @@ func testAnswers(baseURLContent, baseURLImages string) []string {
|
|||||||
|
|
||||||
<p><code>{ my code }</code></p>
|
<p><code>{ my code }</code></p>
|
||||||
|
|
||||||
<p>Add as many paragraphs as you like.</p>
|
<p>Add as many paragraphs as you like.</p></li>
|
||||||
</li>
|
|
||||||
</ol>
|
</ol>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
|
File diff suppressed because it is too large
Load Diff
@ -5,12 +5,16 @@
|
|||||||
package markup
|
package markup
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"html"
|
||||||
|
"strings"
|
||||||
|
|
||||||
"code.gitea.io/gitea/modules/log"
|
"code.gitea.io/gitea/modules/log"
|
||||||
"code.gitea.io/gitea/modules/markup"
|
"code.gitea.io/gitea/modules/markup"
|
||||||
"code.gitea.io/gitea/modules/markup/markdown"
|
"code.gitea.io/gitea/modules/util"
|
||||||
|
|
||||||
"github.com/chaseadamsio/goorgeous"
|
"github.com/niklasfasching/go-org/org"
|
||||||
"github.com/russross/blackfriday"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
@ -32,23 +36,23 @@ func (Parser) Extensions() []string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Render renders orgmode rawbytes to HTML
|
// Render renders orgmode rawbytes to HTML
|
||||||
func Render(rawBytes []byte, urlPrefix string, metas map[string]string, isWiki bool) (result []byte) {
|
func Render(rawBytes []byte, urlPrefix string, metas map[string]string, isWiki bool) []byte {
|
||||||
defer func() {
|
htmlWriter := org.NewHTMLWriter()
|
||||||
if err := recover(); err != nil {
|
|
||||||
log.Error("Panic in orgmode.Render: %v Just returning the rawBytes", err)
|
renderer := &Renderer{
|
||||||
result = rawBytes
|
HTMLWriter: htmlWriter,
|
||||||
}
|
URLPrefix: urlPrefix,
|
||||||
}()
|
IsWiki: isWiki,
|
||||||
htmlFlags := blackfriday.HTML_USE_XHTML
|
|
||||||
htmlFlags |= blackfriday.HTML_SKIP_STYLE
|
|
||||||
htmlFlags |= blackfriday.HTML_OMIT_CONTENTS
|
|
||||||
renderer := &markdown.Renderer{
|
|
||||||
Renderer: blackfriday.HtmlRenderer(htmlFlags, "", ""),
|
|
||||||
URLPrefix: urlPrefix,
|
|
||||||
IsWiki: isWiki,
|
|
||||||
}
|
}
|
||||||
result = goorgeous.Org(rawBytes, renderer)
|
|
||||||
return
|
htmlWriter.ExtendingWriter = renderer
|
||||||
|
|
||||||
|
res, err := org.New().Silent().Parse(bytes.NewReader(rawBytes), "").Write(renderer)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Panic in orgmode.Render: %v Just returning the rawBytes", err)
|
||||||
|
return rawBytes
|
||||||
|
}
|
||||||
|
return []byte(res)
|
||||||
}
|
}
|
||||||
|
|
||||||
// RenderString reners orgmode string to HTML string
|
// RenderString reners orgmode string to HTML string
|
||||||
@ -56,7 +60,63 @@ func RenderString(rawContent string, urlPrefix string, metas map[string]string,
|
|||||||
return string(Render([]byte(rawContent), urlPrefix, metas, isWiki))
|
return string(Render([]byte(rawContent), urlPrefix, metas, isWiki))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Render implements markup.Parser
|
// Render reners orgmode string to HTML string
|
||||||
func (Parser) Render(rawBytes []byte, urlPrefix string, metas map[string]string, isWiki bool) []byte {
|
func (Parser) Render(rawBytes []byte, urlPrefix string, metas map[string]string, isWiki bool) []byte {
|
||||||
return Render(rawBytes, urlPrefix, metas, isWiki)
|
return Render(rawBytes, urlPrefix, metas, isWiki)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Renderer implements org.Writer
|
||||||
|
type Renderer struct {
|
||||||
|
*org.HTMLWriter
|
||||||
|
URLPrefix string
|
||||||
|
IsWiki bool
|
||||||
|
}
|
||||||
|
|
||||||
|
var byteMailto = []byte("mailto:")
|
||||||
|
|
||||||
|
// WriteRegularLink renders images, links or videos
|
||||||
|
func (r *Renderer) WriteRegularLink(l org.RegularLink) {
|
||||||
|
link := []byte(html.EscapeString(l.URL))
|
||||||
|
if l.Protocol == "file" {
|
||||||
|
link = link[len("file:"):]
|
||||||
|
}
|
||||||
|
if len(link) > 0 && !markup.IsLink(link) &&
|
||||||
|
link[0] != '#' && !bytes.HasPrefix(link, byteMailto) {
|
||||||
|
lnk := string(link)
|
||||||
|
if r.IsWiki {
|
||||||
|
lnk = util.URLJoin("wiki", lnk)
|
||||||
|
}
|
||||||
|
link = []byte(util.URLJoin(r.URLPrefix, lnk))
|
||||||
|
}
|
||||||
|
|
||||||
|
description := string(link)
|
||||||
|
if l.Description != nil {
|
||||||
|
description = r.nodesAsString(l.Description...)
|
||||||
|
}
|
||||||
|
switch l.Kind() {
|
||||||
|
case "image":
|
||||||
|
r.WriteString(fmt.Sprintf(`<img src="%s" alt="%s" title="%s" />`, link, description, description))
|
||||||
|
case "video":
|
||||||
|
r.WriteString(fmt.Sprintf(`<video src="%s" title="%s">%s</video>`, link, description, description))
|
||||||
|
default:
|
||||||
|
r.WriteString(fmt.Sprintf(`<a href="%s" title="%s">%s</a>`, link, description, description))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Renderer) emptyClone() *Renderer {
|
||||||
|
wcopy := *(r.HTMLWriter)
|
||||||
|
wcopy.Builder = strings.Builder{}
|
||||||
|
|
||||||
|
rcopy := *r
|
||||||
|
rcopy.HTMLWriter = &wcopy
|
||||||
|
|
||||||
|
wcopy.ExtendingWriter = &rcopy
|
||||||
|
|
||||||
|
return &rcopy
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Renderer) nodesAsString(nodes ...org.Node) string {
|
||||||
|
tmp := r.emptyClone()
|
||||||
|
org.WriteNodes(tmp, nodes...)
|
||||||
|
return tmp.String()
|
||||||
|
}
|
||||||
|
@ -27,12 +27,12 @@ func TestRender_StandardLinks(t *testing.T) {
|
|||||||
assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer))
|
assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer))
|
||||||
}
|
}
|
||||||
|
|
||||||
googleRendered := `<p><a href="https://google.com/" title="https://google.com/">https://google.com/</a></p>`
|
googleRendered := "<p>\n<a href=\"https://google.com/\" title=\"https://google.com/\">https://google.com/</a>\n</p>"
|
||||||
test("[[https://google.com/]]", googleRendered)
|
test("[[https://google.com/]]", googleRendered)
|
||||||
|
|
||||||
lnk := util.URLJoin(AppSubURL, "WikiPage")
|
lnk := util.URLJoin(AppSubURL, "WikiPage")
|
||||||
test("[[WikiPage][WikiPage]]",
|
test("[[WikiPage][WikiPage]]",
|
||||||
`<p><a href="`+lnk+`" title="WikiPage">WikiPage</a></p>`)
|
"<p>\n<a href=\""+lnk+"\" title=\"WikiPage\">WikiPage</a>\n</p>")
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestRender_Images(t *testing.T) {
|
func TestRender_Images(t *testing.T) {
|
||||||
@ -45,10 +45,8 @@ func TestRender_Images(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
url := "../../.images/src/02/train.jpg"
|
url := "../../.images/src/02/train.jpg"
|
||||||
title := "Train"
|
|
||||||
result := util.URLJoin(AppSubURL, url)
|
result := util.URLJoin(AppSubURL, url)
|
||||||
|
|
||||||
test(
|
test("[[file:"+url+"]]",
|
||||||
"[[file:"+url+"]["+title+"]]",
|
"<p>\n<img src=\""+result+"\" alt=\""+result+"\" title=\""+result+"\" />\n</p>")
|
||||||
`<p><a href="`+result+`"><img src="`+result+`" alt="`+title+`" title="`+title+`" /></a></p>`)
|
|
||||||
}
|
}
|
||||||
|
1
vendor/github.com/chaseadamsio/goorgeous/.gitignore
generated
vendored
1
vendor/github.com/chaseadamsio/goorgeous/.gitignore
generated
vendored
@ -1 +0,0 @@
|
|||||||
.DS_Store
|
|
12
vendor/github.com/chaseadamsio/goorgeous/.travis.yml
generated
vendored
12
vendor/github.com/chaseadamsio/goorgeous/.travis.yml
generated
vendored
@ -1,12 +0,0 @@
|
|||||||
language: go
|
|
||||||
|
|
||||||
go:
|
|
||||||
- 1.7
|
|
||||||
|
|
||||||
before_install:
|
|
||||||
- go get golang.org/x/tools/cmd/cover
|
|
||||||
- go get github.com/mattn/goveralls
|
|
||||||
|
|
||||||
script:
|
|
||||||
- go test -v -covermode=count -coverprofile=coverage.out
|
|
||||||
- $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci
|
|
66
vendor/github.com/chaseadamsio/goorgeous/README.org
generated
vendored
66
vendor/github.com/chaseadamsio/goorgeous/README.org
generated
vendored
@ -1,66 +0,0 @@
|
|||||||
#+TITLE: chaseadamsio/goorgeous
|
|
||||||
|
|
||||||
[[https://travis-ci.org/chaseadamsio/goorgeous.svg?branch=master]]
|
|
||||||
[[https://coveralls.io/repos/github/chaseadamsio/goorgeous/badge.svg?branch=master]]
|
|
||||||
|
|
||||||
/goorgeous is a Go Org to HTML Parser./
|
|
||||||
|
|
||||||
[[file:gopher_small.gif]]
|
|
||||||
|
|
||||||
*Pronounced: Go? Org? Yes!*
|
|
||||||
|
|
||||||
#+BEGIN_QUOTE
|
|
||||||
"Org mode is for keeping notes, maintaining TODO lists, planning projects, and authoring documents with a fast and effective plain-text system."
|
|
||||||
|
|
||||||
- [[orgmode.org]]
|
|
||||||
#+END_QUOTE
|
|
||||||
|
|
||||||
The purpose of this package is to come as close as possible to parsing an =*.org= document into HTML, the same way one might publish [[http://orgmode.org/worg/org-tutorials/org-publish-html-tutorial.html][with org-publish-html from Emacs]].
|
|
||||||
|
|
||||||
* Installation
|
|
||||||
|
|
||||||
#+BEGIN_SRC sh
|
|
||||||
go get -u github.com/chaseadamsio/goorgeous
|
|
||||||
#+END_SRC
|
|
||||||
|
|
||||||
* Usage
|
|
||||||
|
|
||||||
** Org Headers
|
|
||||||
|
|
||||||
To retrieve the headers from a =[]byte=, call =OrgHeaders= and it will return a =map[string]interface{}=:
|
|
||||||
|
|
||||||
#+BEGIN_SRC go
|
|
||||||
input := "#+title: goorgeous\n* Some Headline\n"
|
|
||||||
out := goorgeous.OrgHeaders(input)
|
|
||||||
#+END_SRC
|
|
||||||
|
|
||||||
#+BEGIN_SRC go
|
|
||||||
map[string]interface{}{
|
|
||||||
"title": "goorgeous"
|
|
||||||
}
|
|
||||||
#+END_SRC
|
|
||||||
|
|
||||||
** Org Content
|
|
||||||
|
|
||||||
After importing =github.com/chaseadamsio/goorgeous=, you can call =Org= with a =[]byte= and it will return an =html= version of the content as a =[]byte=
|
|
||||||
|
|
||||||
#+BEGIN_SRC go
|
|
||||||
input := "#+TITLE: goorgeous\n* Some Headline\n"
|
|
||||||
out := goorgeous.Org(input)
|
|
||||||
#+END_SRC
|
|
||||||
|
|
||||||
=out= will be:
|
|
||||||
|
|
||||||
#+BEGIN_SRC html
|
|
||||||
<h1>Some Headline</h1>\n
|
|
||||||
#+END_SRC
|
|
||||||
|
|
||||||
* Why?
|
|
||||||
|
|
||||||
First off, I've become an unapologetic user of Emacs & ever since finding =org-mode= I use it for anything having to do with writing content, organizing my life and keeping documentation of my days/weeks/months.
|
|
||||||
|
|
||||||
Although I like Emacs & =emacs-lisp=, I publish all of my html sites with [[https://gohugo.io][Hugo Static Site Generator]] and wanted to be able to write my content in =org-mode= in Emacs rather than markdown.
|
|
||||||
|
|
||||||
Hugo's implementation of templating and speed are unmatched, so the only way I knew for sure I could continue to use Hugo and write in =org-mode= seamlessly was to write a golang parser for org content and submit a PR for Hugo to use it.
|
|
||||||
* Acknowledgements
|
|
||||||
I leaned heavily on russross' [[https://github.com/russross/blackfriday][blackfriday markdown renderer]] as both an example of how to write a parser (with some updates to leverage the go we know today) and reusing the blackfriday HTML Renderer so I didn't have to write my own!
|
|
803
vendor/github.com/chaseadamsio/goorgeous/goorgeous.go
generated
vendored
803
vendor/github.com/chaseadamsio/goorgeous/goorgeous.go
generated
vendored
File diff suppressed because it is too large
Load Diff
BIN
vendor/github.com/chaseadamsio/goorgeous/gopher.gif
generated
vendored
BIN
vendor/github.com/chaseadamsio/goorgeous/gopher.gif
generated
vendored
Binary file not shown.
Before Width: | Height: | Size: 15 KiB |
BIN
vendor/github.com/chaseadamsio/goorgeous/gopher_small.gif
generated
vendored
BIN
vendor/github.com/chaseadamsio/goorgeous/gopher_small.gif
generated
vendored
Binary file not shown.
Before Width: | Height: | Size: 3.2 KiB |
70
vendor/github.com/chaseadamsio/goorgeous/header.go
generated
vendored
70
vendor/github.com/chaseadamsio/goorgeous/header.go
generated
vendored
@ -1,70 +0,0 @@
|
|||||||
package goorgeous
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"bytes"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ExtractOrgHeaders finds and returns all of the headers
|
|
||||||
// from a bufio.Reader and returns them as their own byte slice
|
|
||||||
func ExtractOrgHeaders(r *bufio.Reader) (fm []byte, err error) {
|
|
||||||
var out bytes.Buffer
|
|
||||||
endOfHeaders := true
|
|
||||||
for endOfHeaders {
|
|
||||||
p, err := r.Peek(2)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if !charMatches(p[0], '#') && !charMatches(p[1], '+') {
|
|
||||||
endOfHeaders = false
|
|
||||||
break
|
|
||||||
}
|
|
||||||
line, _, err := r.ReadLine()
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
out.Write(line)
|
|
||||||
out.WriteByte('\n')
|
|
||||||
}
|
|
||||||
return out.Bytes(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// reHeader matches a single "#+KEY: value" header line.
var reHeader = regexp.MustCompile(`^#\+(\w+?): (.*)`)

// OrgHeaders finds all of the leading "#+KEY: value" headers from a byte
// slice and returns them as a map of string to interface.
//
// Values for the keys "tags", "categories" and "aliases" (compared
// case-insensitively) are split on spaces into a []string; every other
// value is stored as a plain string.
func OrgHeaders(input []byte) (map[string]interface{}, error) {
	out := make(map[string]interface{})
	scanner := bufio.NewScanner(bytes.NewReader(input))

	for scanner.Scan() {
		data := scanner.Bytes()
		// Stop at the first line that is too short to carry the "#+"
		// prefix or does not start with it. The previous code indexed
		// data[0]/data[1] unconditionally (panicking on blank lines) and
		// combined the checks with && instead of ||, so lines such as
		// "# comment" were wrongly treated as headers.
		if len(data) < 2 || data[0] != '#' || data[1] != '+' {
			return out, nil
		}
		matches := reHeader.FindSubmatch(data)
		if len(matches) < 3 {
			continue
		}

		key := string(matches[1])
		val := matches[2]
		switch strings.ToLower(key) {
		case "tags", "categories", "aliases":
			bTags := bytes.Split(val, []byte(" "))
			tags := make([]string, len(bTags))
			for idx, tag := range bTags {
				tags[idx] = string(tag)
			}
			out[key] = tags
		default:
			out[key] = string(val)
		}
	}
	// Scanner errors were previously swallowed; surface them to the caller.
	if err := scanner.Err(); err != nil {
		return out, err
	}
	return out, nil
}
|
|
@ -1,6 +1,6 @@
|
|||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2017 Chase Adams <realchaseadams@gmail.com>
|
Copyright (c) 2018 Niklas Fasching
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
84
vendor/github.com/niklasfasching/go-org/org/block.go
generated
vendored
Normal file
84
vendor/github.com/niklasfasching/go-org/org/block.go
generated
vendored
Normal file
@ -0,0 +1,84 @@
|
|||||||
|
package org
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Block is a "#+BEGIN_NAME params ... #+END_NAME" block (e.g. SRC, QUOTE).
type Block struct {
	Name       string
	Parameters []string
	Children   []Node
}

// Example holds the contents of consecutive ":"-prefixed example lines.
type Example struct {
	Children []Node
}

var exampleLineRegexp = regexp.MustCompile(`^(\s*):(\s(.*)|\s*$)`)
var beginBlockRegexp = regexp.MustCompile(`(?i)^(\s*)#\+BEGIN_(\w+)(.*)`)
var endBlockRegexp = regexp.MustCompile(`(?i)^(\s*)#\+END_(\w+)`)

// lexBlock lexes "#+BEGIN_X" / "#+END_X" lines into beginBlock / endBlock
// tokens; the block name is normalized to upper case.
func lexBlock(line string) (token, bool) {
	if m := beginBlockRegexp.FindStringSubmatch(line); m != nil {
		return token{"beginBlock", len(m[1]), strings.ToUpper(m[2]), m}, true
	} else if m := endBlockRegexp.FindStringSubmatch(line); m != nil {
		return token{"endBlock", len(m[1]), strings.ToUpper(m[2]), m}, true
	}
	return nilToken, false
}

// lexExample lexes a single example line (": ...") into an example token.
func lexExample(line string) (token, bool) {
	if m := exampleLineRegexp.FindStringSubmatch(line); m != nil {
		return token{"example", len(m[1]), m[3], m}, true
	}
	return nilToken, false
}

// isRawTextBlock reports whether a block's contents are kept as raw text
// rather than being parsed as org markup.
func isRawTextBlock(name string) bool { return name == "SRC" || name == "EXAMPLE" || name == "EXPORT" }

// parseBlock consumes a begin/end block starting at token i. It returns
// (0, nil) when the matching "#+END_NAME" line is missing.
func (d *Document) parseBlock(i int, parentStop stopFn) (int, Node) {
	t, start := d.tokens[i], i
	name, parameters := t.content, strings.Fields(t.matches[3])
	trim := trimIndentUpTo(d.tokens[i].lvl)
	stop := func(d *Document, i int) bool {
		return i >= len(d.tokens) || (d.tokens[i].kind == "endBlock" && d.tokens[i].content == name)
	}
	block, i := Block{name, parameters, nil}, i+1
	if isRawTextBlock(name) {
		rawText := ""
		for ; !stop(d, i); i++ {
			// matches[0] is the full original line; strip indentation up
			// to the block's own level.
			rawText += trim(d.tokens[i].matches[0]) + "\n"
		}
		block.Children = d.parseRawInline(rawText)
	} else {
		consumed, nodes := d.parseMany(i, stop)
		block.Children = nodes
		i += consumed
	}
	if i < len(d.tokens) && d.tokens[i].kind == "endBlock" && d.tokens[i].content == name {
		return i + 1 - start, block
	}
	return 0, nil
}

// parseExample collects consecutive example lines into an Example node.
func (d *Document) parseExample(i int, parentStop stopFn) (int, Node) {
	example, start := Example{}, i
	for ; !parentStop(d, i) && d.tokens[i].kind == "example"; i++ {
		example.Children = append(example.Children, Text{d.tokens[i].content, true})
	}
	return i - start, example
}

// trimIndentUpTo returns a function that strips at most max leading
// whitespace characters from a line.
func trimIndentUpTo(max int) func(string) string {
	return func(line string) string {
		i := 0
		for ; i < len(line) && i < max && unicode.IsSpace(rune(line[i])); i++ {
		}
		return line[i:]
	}
}

func (n Example) String() string { return orgWriter.nodesAsString(n) }
func (n Block) String() string   { return orgWriter.nodesAsString(n) }
260
vendor/github.com/niklasfasching/go-org/org/document.go
generated
vendored
Normal file
260
vendor/github.com/niklasfasching/go-org/org/document.go
generated
vendored
Normal file
@ -0,0 +1,260 @@
|
|||||||
|
// Package org is an Org mode syntax processor.
|
||||||
|
//
|
||||||
|
// It parses plain text into an AST and can export it as HTML or pretty printed Org mode syntax.
|
||||||
|
// Further export formats can be defined using the Writer interface.
|
||||||
|
//
|
||||||
|
// You probably want to start with something like this:
|
||||||
|
// input := strings.NewReader("Your Org mode input")
|
||||||
|
// html, err := org.New().Parse(input, "./").Write(org.NewHTMLWriter())
|
||||||
|
// if err != nil {
|
||||||
|
// log.Fatalf("Something went wrong: %s", err)
|
||||||
|
// }
|
||||||
|
// log.Print(html)
|
||||||
|
package org
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Configuration bundles the parsing options shared by all documents created
// from it.
type Configuration struct {
	MaxEmphasisNewLines int               // Maximum number of newlines inside an emphasis. See org-emphasis-regexp-components newline.
	AutoLink            bool              // Try to convert text passages that look like hyperlinks into hyperlinks.
	DefaultSettings     map[string]string // Default values for settings that are overriden by setting the same key in BufferSettings.
	Log                 *log.Logger       // Log is used to print warnings during parsing.
	ReadFile            func(filename string) ([]byte, error) // ReadFile is used to read e.g. #+INCLUDE files.
}

// Document contains the parsing results and a pointer to the Configuration.
type Document struct {
	*Configuration
	Path           string // Path of the file containing the parse input - used to resolve relative paths during parsing (e.g. INCLUDE).
	tokens         []token
	Nodes          []Node
	NamedNodes     map[string]Node
	Outline        Outline           // Outline is a Table Of Contents for the document and contains all sections (headline + content).
	BufferSettings map[string]string // Settings contains all settings that were parsed from keywords.
	Error          error
}

// Node represents a parsed node of the document.
type Node interface {
	String() string // String returns the pretty printed Org mode string for the node (see OrgWriter).
}

// lexFn tries to lex a single input line into a token.
type lexFn = func(line string) (t token, ok bool)

// parseFn parses nodes starting at a token index, bounded by a stopFn.
type parseFn = func(*Document, int, stopFn) (int, Node)

// stopFn reports whether parsing should stop at the given token index.
type stopFn = func(*Document, int) bool

// token is one lexed input line: its kind, indentation level, primary
// content and the full regexp submatches it was lexed from.
type token struct {
	kind    string
	lvl     int
	content string
	matches []string
}

// lexFns are tried in order for each line; the first match wins, so more
// specific lexers come before the catch-all lexText.
var lexFns = []lexFn{
	lexHeadline,
	lexDrawer,
	lexBlock,
	lexList,
	lexTable,
	lexHorizontalRule,
	lexKeywordOrComment,
	lexFootnoteDefinition,
	lexExample,
	lexText,
}

var nilToken = token{"nil", -1, "", nil}

// orgWriter is a shared pretty-printer backing the String methods of nodes.
var orgWriter = NewOrgWriter()

// New returns a new Configuration with (hopefully) sane defaults.
func New() *Configuration {
	return &Configuration{
		AutoLink:            true,
		MaxEmphasisNewLines: 1,
		DefaultSettings: map[string]string{
			"TODO":         "TODO | DONE",
			"EXCLUDE_TAGS": "noexport",
			"OPTIONS":      "toc:t <:t e:t f:t pri:t todo:t tags:t",
		},
		Log:      log.New(os.Stderr, "go-org: ", 0),
		ReadFile: ioutil.ReadFile,
	}
}

// String returns the pretty printed Org mode string for the given nodes (see OrgWriter).
func String(nodes []Node) string { return orgWriter.nodesAsString(nodes...) }

// Write is called after with an instance of the Writer interface to export a parsed Document into another format.
func (d *Document) Write(w Writer) (out string, err error) {
	// Writers are allowed to panic internally; convert that into an error.
	defer func() {
		if recovered := recover(); recovered != nil {
			err = fmt.Errorf("could not write output: %s", recovered)
		}
	}()
	if d.Error != nil {
		return "", d.Error
	} else if d.Nodes == nil {
		return "", fmt.Errorf("could not write output: parse was not called")
	}
	w.Before(d)
	WriteNodes(w, d.Nodes...)
	w.After(d)
	return w.String(), err
}

// Parse parses the input into an AST (and some other helpful fields like Outline).
// To allow method chaining, errors are stored in document.Error rather than being returned.
func (c *Configuration) Parse(input io.Reader, path string) (d *Document) {
	outlineSection := &Section{}
	d = &Document{
		Configuration:  c,
		Outline:        Outline{outlineSection, outlineSection, 0},
		BufferSettings: map[string]string{},
		NamedNodes:     map[string]Node{},
		Path:           path,
	}
	// The parser panics on malformed input; convert that into d.Error.
	defer func() {
		if recovered := recover(); recovered != nil {
			d.Error = fmt.Errorf("could not parse input: %v", recovered)
		}
	}()
	if d.tokens != nil {
		d.Error = fmt.Errorf("parse was called multiple times")
	}
	d.tokenize(input)
	_, nodes := d.parseMany(0, func(d *Document, i int) bool { return i >= len(d.tokens) })
	d.Nodes = nodes
	return d
}

// Silent disables all logging of warnings during parsing.
func (c *Configuration) Silent() *Configuration {
	c.Log = log.New(ioutil.Discard, "", 0)
	return c
}

// tokenize lexes the input line by line into d.tokens.
func (d *Document) tokenize(input io.Reader) {
	d.tokens = []token{}
	scanner := bufio.NewScanner(input)
	for scanner.Scan() {
		d.tokens = append(d.tokens, tokenize(scanner.Text()))
	}
	if err := scanner.Err(); err != nil {
		d.Error = fmt.Errorf("could not tokenize input: %s", err)
	}
}

// Get returns the value for key in BufferSettings or DefaultSettings if key does not exist in the former
func (d *Document) Get(key string) string {
	if v, ok := d.BufferSettings[key]; ok {
		return v
	}
	if v, ok := d.DefaultSettings[key]; ok {
		return v
	}
	return ""
}

// GetOption returns the value associated to the export option key
// Currently supported options:
// - < (export timestamps)
// - e (export org entities)
// - f (export footnotes)
// - toc (export table of content)
// - todo (export headline todo status)
// - pri (export headline priority)
// - tags (export headline tags)
// see https://orgmode.org/manual/Export-settings.html for more information
func (d *Document) GetOption(key string) bool {
	get := func(settings map[string]string) string {
		for _, field := range strings.Fields(settings["OPTIONS"]) {
			if strings.HasPrefix(field, key+":") {
				return field[len(key)+1:]
			}
		}
		return ""
	}
	value := get(d.BufferSettings)
	if value == "" {
		value = get(d.DefaultSettings)
	}
	switch value {
	case "t":
		return true
	case "nil":
		return false
	default:
		d.Log.Printf("Bad value for export option %s (%s)", key, value)
		return false
	}
}

// parseOne parses the single node starting at token i, dispatching on the
// token kind. Unparseable tokens are re-lexed as plain text and retried.
func (d *Document) parseOne(i int, stop stopFn) (consumed int, node Node) {
	switch d.tokens[i].kind {
	case "unorderedList", "orderedList":
		consumed, node = d.parseList(i, stop)
	case "tableRow", "tableSeparator":
		consumed, node = d.parseTable(i, stop)
	case "beginBlock":
		consumed, node = d.parseBlock(i, stop)
	case "beginDrawer":
		consumed, node = d.parseDrawer(i, stop)
	case "text":
		consumed, node = d.parseParagraph(i, stop)
	case "example":
		consumed, node = d.parseExample(i, stop)
	case "horizontalRule":
		consumed, node = d.parseHorizontalRule(i, stop)
	case "comment":
		consumed, node = d.parseComment(i, stop)
	case "keyword":
		consumed, node = d.parseKeyword(i, stop)
	case "headline":
		consumed, node = d.parseHeadline(i, stop)
	case "footnoteDefinition":
		consumed, node = d.parseFootnoteDefinition(i, stop)
	}

	if consumed != 0 {
		return consumed, node
	}
	d.Log.Printf("Could not parse token %#v: Falling back to treating it as plain text.", d.tokens[i])
	m := plainTextRegexp.FindStringSubmatch(d.tokens[i].matches[0])
	d.tokens[i] = token{"text", len(m[1]), m[2], m}
	return d.parseOne(i, stop)
}

// parseMany parses nodes starting at token i until stop (or the end of the
// token stream) and returns how many tokens were consumed.
func (d *Document) parseMany(i int, stop stopFn) (int, []Node) {
	start, nodes := i, []Node{}
	for i < len(d.tokens) && !stop(d, i) {
		consumed, node := d.parseOne(i, stop)
		i += consumed
		nodes = append(nodes, node)
	}
	return i - start, nodes
}

// addHeadline registers a headline in the outline and returns its 1-based
// index within the document.
func (d *Document) addHeadline(headline *Headline) int {
	current := &Section{Headline: headline}
	d.Outline.last.add(current)
	d.Outline.count++
	d.Outline.last = current
	return d.Outline.count
}

// tokenize lexes a single line; lexText is a catch-all, so a failure to lex
// indicates a bug and panics (recovered in Parse).
func tokenize(line string) token {
	for _, lexFn := range lexFns {
		if token, ok := lexFn(line); ok {
			return token
		}
	}
	panic(fmt.Sprintf("could not lex line: %s", line))
}
97
vendor/github.com/niklasfasching/go-org/org/drawer.go
generated
vendored
Normal file
97
vendor/github.com/niklasfasching/go-org/org/drawer.go
generated
vendored
Normal file
@ -0,0 +1,97 @@
|
|||||||
|
package org
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Drawer is a generic ":NAME: ... :END:" drawer.
type Drawer struct {
	Name     string
	Children []Node
}

// PropertyDrawer is the special ":PROPERTIES:" drawer; entries are kept as
// ordered key/value pairs.
type PropertyDrawer struct {
	Properties [][]string
}

var beginDrawerRegexp = regexp.MustCompile(`^(\s*):(\S+):\s*$`)
var endDrawerRegexp = regexp.MustCompile(`^(\s*):END:\s*$`)
var propertyRegexp = regexp.MustCompile(`^(\s*):(\S+):(\s+(.*)$|$)`)

// lexDrawer lexes ":NAME:" / ":END:" lines. ":END:" is tried first because
// it would also match the generic begin pattern.
func lexDrawer(line string) (token, bool) {
	if m := endDrawerRegexp.FindStringSubmatch(line); m != nil {
		return token{"endDrawer", len(m[1]), "", m}, true
	} else if m := beginDrawerRegexp.FindStringSubmatch(line); m != nil {
		return token{"beginDrawer", len(m[1]), strings.ToUpper(m[2]), m}, true
	}
	return nilToken, false
}

// parseDrawer parses a drawer starting at token i; ":PROPERTIES:" drawers
// are delegated to parsePropertyDrawer.
func (d *Document) parseDrawer(i int, parentStop stopFn) (int, Node) {
	name := strings.ToUpper(d.tokens[i].content)
	if name == "PROPERTIES" {
		return d.parsePropertyDrawer(i, parentStop)
	}
	drawer, start := Drawer{Name: name}, i
	i++
	stop := func(d *Document, i int) bool {
		if parentStop(d, i) {
			return true
		}
		kind := d.tokens[i].kind
		return kind == "beginDrawer" || kind == "endDrawer" || kind == "headline"
	}
	for {
		consumed, nodes := d.parseMany(i, stop)
		i += consumed
		drawer.Children = append(drawer.Children, nodes...)
		if i < len(d.tokens) && d.tokens[i].kind == "beginDrawer" {
			// Nested drawers are not supported: keep the inner ":NAME:"
			// line as plain text and continue inside the outer drawer.
			p := Paragraph{[]Node{Text{":" + d.tokens[i].content + ":", false}}}
			drawer.Children = append(drawer.Children, p)
			i++
		} else {
			break
		}
	}
	if i < len(d.tokens) && d.tokens[i].kind == "endDrawer" {
		i++
	}
	return i - start, drawer
}

// parsePropertyDrawer parses ":PROPERTIES: ... :END:". It returns (0, nil)
// when a contained line is not a valid property or the drawer is
// unterminated, letting the caller fall back to other interpretations.
func (d *Document) parsePropertyDrawer(i int, parentStop stopFn) (int, Node) {
	drawer, start := PropertyDrawer{}, i
	i++
	stop := func(d *Document, i int) bool {
		return parentStop(d, i) || (d.tokens[i].kind != "text" && d.tokens[i].kind != "beginDrawer")
	}
	for ; !stop(d, i); i++ {
		m := propertyRegexp.FindStringSubmatch(d.tokens[i].matches[0])
		if m == nil {
			return 0, nil
		}
		// Keys are upper-cased; values have surrounding whitespace trimmed.
		k, v := strings.ToUpper(m[2]), strings.TrimSpace(m[4])
		drawer.Properties = append(drawer.Properties, []string{k, v})
	}
	if i < len(d.tokens) && d.tokens[i].kind == "endDrawer" {
		i++
	} else {
		return 0, nil
	}
	return i - start, drawer
}

// Get returns the value stored for key and whether it was present. It is
// safe to call on a nil drawer.
func (d *PropertyDrawer) Get(key string) (string, bool) {
	if d == nil {
		return "", false
	}
	for _, kvPair := range d.Properties {
		if kvPair[0] == key {
			return kvPair[1], true
		}
	}
	return "", false
}

func (n Drawer) String() string         { return orgWriter.nodesAsString(n) }
func (n PropertyDrawer) String() string { return orgWriter.nodesAsString(n) }
35
vendor/github.com/niklasfasching/go-org/org/footnote.go
generated
vendored
Normal file
35
vendor/github.com/niklasfasching/go-org/org/footnote.go
generated
vendored
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
package org
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
)
|
||||||
|
|
||||||
|
// FootnoteDefinition is a "[fn:name] ..." definition and its content.
type FootnoteDefinition struct {
	Name     string
	Children []Node
	Inline   bool // always false for definitions parsed here; presumably set for inline footnotes elsewhere — confirm against inline parsing
}

var footnoteDefinitionRegexp = regexp.MustCompile(`^\[fn:([\w-]+)\](\s+(.+)|\s*$)`)

// lexFootnoteDefinition lexes "[fn:name] ..." definition lines.
func lexFootnoteDefinition(line string) (token, bool) {
	if m := footnoteDefinitionRegexp.FindStringSubmatch(line); m != nil {
		return token{"footnoteDefinition", 0, m[1], m}, true
	}
	return nilToken, false
}

// parseFootnoteDefinition parses a definition and its continuation lines
// (ending at a second blank line, the next headline or the next definition).
func (d *Document) parseFootnoteDefinition(i int, parentStop stopFn) (int, Node) {
	start, name := i, d.tokens[i].content
	// Re-lex the remainder of the line after "[fn:name]" so it is parsed
	// as the first token of the definition body.
	d.tokens[i] = tokenize(d.tokens[i].matches[2])
	stop := func(d *Document, i int) bool {
		return parentStop(d, i) ||
			(isSecondBlankLine(d, i) && i > start+1) ||
			d.tokens[i].kind == "headline" || d.tokens[i].kind == "footnoteDefinition"
	}
	consumed, nodes := d.parseMany(i, stop)
	definition := FootnoteDefinition{name, nodes, false}
	return consumed, definition
}

func (n FootnoteDefinition) String() string { return orgWriter.nodesAsString(n) }
27
vendor/github.com/niklasfasching/go-org/org/fuzz.go
generated
vendored
Normal file
27
vendor/github.com/niklasfasching/go-org/org/fuzz.go
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
// +build gofuzz
|
||||||
|
|
||||||
|
package org
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Fuzz function to be used by https://github.com/dvyukov/go-fuzz
|
||||||
|
func Fuzz(input []byte) int {
|
||||||
|
conf := New().Silent()
|
||||||
|
d := conf.Parse(bytes.NewReader(input), "")
|
||||||
|
orgOutput, err := d.Write(NewOrgWriter())
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
htmlOutputA, err := d.Write(NewHTMLWriter())
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
htmlOutputB, err := conf.Parse(strings.NewReader(orgOutput), "").Write(NewHTMLWriter())
|
||||||
|
if htmlOutputA != htmlOutputB {
|
||||||
|
panic("rendered org results in different html than original input")
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
101
vendor/github.com/niklasfasching/go-org/org/headline.go
generated
vendored
Normal file
101
vendor/github.com/niklasfasching/go-org/org/headline.go
generated
vendored
Normal file
@ -0,0 +1,101 @@
|
|||||||
|
package org
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Outline is a table of contents for a document; it tracks the section
// tree plus a pointer to the most recently added section and a counter
// used to index headlines.
type Outline struct {
	*Section
	last  *Section
	count int
}

// Section is a node of the outline tree: a headline and its subsections.
type Section struct {
	Headline *Headline
	Parent   *Section
	Children []*Section
}

// Headline is a "* ..." org headline with optional TODO status, priority,
// property drawer and trailing tags.
type Headline struct {
	Index      int
	Lvl        int
	Status     string
	Priority   string
	Properties *PropertyDrawer
	Title      []Node
	Tags       []string
	Children   []Node
}

var headlineRegexp = regexp.MustCompile(`^([*]+)\s+(.*)`)
var tagRegexp = regexp.MustCompile(`(.*?)\s+(:[A-Za-z0-9_@#%:]+:\s*$)`)

// lexHeadline lexes "* ..." lines; lvl is the number of leading stars.
func lexHeadline(line string) (token, bool) {
	if m := headlineRegexp.FindStringSubmatch(line); m != nil {
		return token{"headline", len(m[1]), m[2], m}, true
	}
	return nilToken, false
}

// parseHeadline parses a headline token and everything nested beneath it,
// up to the next headline of the same or a shallower level.
func (d *Document) parseHeadline(i int, parentStop stopFn) (int, Node) {
	t, headline := d.tokens[i], Headline{}
	headline.Lvl = t.lvl

	headline.Index = d.addHeadline(&headline)

	text := t.content
	// The headline may start with one of the configured TODO keywords
	// (setting "TODO", e.g. "TODO | DONE").
	todoKeywords := strings.FieldsFunc(d.Get("TODO"), func(r rune) bool { return unicode.IsSpace(r) || r == '|' })
	for _, k := range todoKeywords {
		if strings.HasPrefix(text, k) && len(text) > len(k) && unicode.IsSpace(rune(text[len(k)])) {
			headline.Status = k
			text = text[len(k)+1:]
			break
		}
	}

	// Optional "[#A]" / "[#B]" / "[#C]" priority cookie.
	if len(text) >= 4 && text[0:2] == "[#" && strings.Contains("ABC", text[2:3]) && text[3] == ']' {
		headline.Priority = text[2:3]
		text = strings.TrimSpace(text[4:])
	}

	// Trailing ":tag1:tag2:" list.
	if m := tagRegexp.FindStringSubmatch(text); m != nil {
		text = m[1]
		headline.Tags = strings.FieldsFunc(m[2], func(r rune) bool { return r == ':' })
	}

	headline.Title = d.parseInline(text)

	stop := func(d *Document, i int) bool {
		return parentStop(d, i) || d.tokens[i].kind == "headline" && d.tokens[i].lvl <= headline.Lvl
	}
	consumed, nodes := d.parseMany(i+1, stop)
	// A property drawer directly below the headline belongs to it.
	if len(nodes) > 0 {
		if d, ok := nodes[0].(PropertyDrawer); ok {
			headline.Properties = &d
			nodes = nodes[1:]
		}
	}
	headline.Children = nodes
	return consumed + 1, headline
}

// ID returns the CUSTOM_ID property if set and a generated id based on the
// headline index otherwise.
func (h Headline) ID() string {
	if customID, ok := h.Properties.Get("CUSTOM_ID"); ok {
		return customID
	}
	return fmt.Sprintf("headline-%d", h.Index)
}

// add walks up from parent to the closest ancestor with a shallower
// headline level and appends current as its child.
func (parent *Section) add(current *Section) {
	if parent.Headline == nil || parent.Headline.Lvl < current.Headline.Lvl {
		parent.Children = append(parent.Children, current)
		current.Parent = parent
	} else {
		parent.Parent.add(current)
	}
}

func (n Headline) String() string { return orgWriter.nodesAsString(n) }
437
vendor/github.com/niklasfasching/go-org/org/html_entity.go
generated
vendored
Normal file
437
vendor/github.com/niklasfasching/go-org/org/html_entity.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
504
vendor/github.com/niklasfasching/go-org/org/html_writer.go
generated
vendored
Normal file
504
vendor/github.com/niklasfasching/go-org/org/html_writer.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
357
vendor/github.com/niklasfasching/go-org/org/inline.go
generated
vendored
Normal file
357
vendor/github.com/niklasfasching/go-org/org/inline.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
184
vendor/github.com/niklasfasching/go-org/org/keyword.go
generated
vendored
Normal file
184
vendor/github.com/niklasfasching/go-org/org/keyword.go
generated
vendored
Normal file
@ -0,0 +1,184 @@
|
|||||||
|
package org
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Comment struct{ Content string }
|
||||||
|
|
||||||
|
type Keyword struct {
|
||||||
|
Key string
|
||||||
|
Value string
|
||||||
|
}
|
||||||
|
|
||||||
|
type NodeWithName struct {
|
||||||
|
Name string
|
||||||
|
Node Node
|
||||||
|
}
|
||||||
|
|
||||||
|
type NodeWithMeta struct {
|
||||||
|
Node Node
|
||||||
|
Meta Metadata
|
||||||
|
}
|
||||||
|
|
||||||
|
type Metadata struct {
|
||||||
|
Caption [][]Node
|
||||||
|
HTMLAttributes [][]string
|
||||||
|
}
|
||||||
|
|
||||||
|
type Include struct {
|
||||||
|
Keyword
|
||||||
|
Resolve func() Node
|
||||||
|
}
|
||||||
|
|
||||||
|
var keywordRegexp = regexp.MustCompile(`^(\s*)#\+([^:]+):(\s+(.*)|$)`)
|
||||||
|
var commentRegexp = regexp.MustCompile(`^(\s*)#(.*)`)
|
||||||
|
|
||||||
|
var includeFileRegexp = regexp.MustCompile(`(?i)^"([^"]+)" (src|example|export) (\w+)$`)
|
||||||
|
var attributeRegexp = regexp.MustCompile(`(?:^|\s+)(:[-\w]+)\s+(.*)$`)
|
||||||
|
|
||||||
|
func lexKeywordOrComment(line string) (token, bool) {
|
||||||
|
if m := keywordRegexp.FindStringSubmatch(line); m != nil {
|
||||||
|
return token{"keyword", len(m[1]), m[2], m}, true
|
||||||
|
} else if m := commentRegexp.FindStringSubmatch(line); m != nil {
|
||||||
|
return token{"comment", len(m[1]), m[2], m}, true
|
||||||
|
}
|
||||||
|
return nilToken, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) parseComment(i int, stop stopFn) (int, Node) {
|
||||||
|
return 1, Comment{d.tokens[i].content}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) parseKeyword(i int, stop stopFn) (int, Node) {
|
||||||
|
k := parseKeyword(d.tokens[i])
|
||||||
|
switch k.Key {
|
||||||
|
case "NAME":
|
||||||
|
return d.parseNodeWithName(k, i, stop)
|
||||||
|
case "SETUPFILE":
|
||||||
|
return d.loadSetupFile(k)
|
||||||
|
case "INCLUDE":
|
||||||
|
return d.parseInclude(k)
|
||||||
|
case "CAPTION", "ATTR_HTML":
|
||||||
|
consumed, node := d.parseAffiliated(i, stop)
|
||||||
|
if consumed != 0 {
|
||||||
|
return consumed, node
|
||||||
|
}
|
||||||
|
fallthrough
|
||||||
|
default:
|
||||||
|
if _, ok := d.BufferSettings[k.Key]; ok {
|
||||||
|
d.BufferSettings[k.Key] = strings.Join([]string{d.BufferSettings[k.Key], k.Value}, "\n")
|
||||||
|
} else {
|
||||||
|
d.BufferSettings[k.Key] = k.Value
|
||||||
|
}
|
||||||
|
return 1, k
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) parseNodeWithName(k Keyword, i int, stop stopFn) (int, Node) {
|
||||||
|
if stop(d, i+1) {
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
consumed, node := d.parseOne(i+1, stop)
|
||||||
|
if consumed == 0 || node == nil {
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
d.NamedNodes[k.Value] = node
|
||||||
|
return consumed + 1, NodeWithName{k.Value, node}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) parseAffiliated(i int, stop stopFn) (int, Node) {
|
||||||
|
start, meta := i, Metadata{}
|
||||||
|
for ; !stop(d, i) && d.tokens[i].kind == "keyword"; i++ {
|
||||||
|
switch k := parseKeyword(d.tokens[i]); k.Key {
|
||||||
|
case "CAPTION":
|
||||||
|
meta.Caption = append(meta.Caption, d.parseInline(k.Value))
|
||||||
|
case "ATTR_HTML":
|
||||||
|
attributes, rest := []string{}, k.Value
|
||||||
|
for {
|
||||||
|
if k, m := "", attributeRegexp.FindStringSubmatch(rest); m != nil {
|
||||||
|
k, rest = m[1], m[2]
|
||||||
|
attributes = append(attributes, k)
|
||||||
|
if v, m := "", attributeRegexp.FindStringSubmatchIndex(rest); m != nil {
|
||||||
|
v, rest = rest[:m[0]], rest[m[0]:]
|
||||||
|
attributes = append(attributes, v)
|
||||||
|
} else {
|
||||||
|
attributes = append(attributes, strings.TrimSpace(rest))
|
||||||
|
break
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
meta.HTMLAttributes = append(meta.HTMLAttributes, attributes)
|
||||||
|
default:
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if stop(d, i) {
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
consumed, node := d.parseOne(i, stop)
|
||||||
|
if consumed == 0 || node == nil {
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
i += consumed
|
||||||
|
return i - start, NodeWithMeta{node, meta}
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseKeyword(t token) Keyword {
|
||||||
|
k, v := t.matches[2], t.matches[4]
|
||||||
|
return Keyword{strings.ToUpper(k), strings.TrimSpace(v)}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) parseInclude(k Keyword) (int, Node) {
|
||||||
|
resolve := func() Node {
|
||||||
|
d.Log.Printf("Bad include %#v", k)
|
||||||
|
return k
|
||||||
|
}
|
||||||
|
if m := includeFileRegexp.FindStringSubmatch(k.Value); m != nil {
|
||||||
|
path, kind, lang := m[1], m[2], m[3]
|
||||||
|
if !filepath.IsAbs(path) {
|
||||||
|
path = filepath.Join(filepath.Dir(d.Path), path)
|
||||||
|
}
|
||||||
|
resolve = func() Node {
|
||||||
|
bs, err := d.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
d.Log.Printf("Bad include %#v: %s", k, err)
|
||||||
|
return k
|
||||||
|
}
|
||||||
|
return Block{strings.ToUpper(kind), []string{lang}, d.parseRawInline(string(bs))}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 1, Include{k, resolve}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) loadSetupFile(k Keyword) (int, Node) {
|
||||||
|
path := k.Value
|
||||||
|
if !filepath.IsAbs(path) {
|
||||||
|
path = filepath.Join(filepath.Dir(d.Path), path)
|
||||||
|
}
|
||||||
|
bs, err := d.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
d.Log.Printf("Bad setup file: %#v: %s", k, err)
|
||||||
|
return 1, k
|
||||||
|
}
|
||||||
|
setupDocument := d.Configuration.Parse(bytes.NewReader(bs), path)
|
||||||
|
if err := setupDocument.Error; err != nil {
|
||||||
|
d.Log.Printf("Bad setup file: %#v: %s", k, err)
|
||||||
|
return 1, k
|
||||||
|
}
|
||||||
|
for k, v := range setupDocument.BufferSettings {
|
||||||
|
d.BufferSettings[k] = v
|
||||||
|
}
|
||||||
|
return 1, k
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n Comment) String() string { return orgWriter.nodesAsString(n) }
|
||||||
|
func (n Keyword) String() string { return orgWriter.nodesAsString(n) }
|
||||||
|
func (n NodeWithMeta) String() string { return orgWriter.nodesAsString(n) }
|
||||||
|
func (n NodeWithName) String() string { return orgWriter.nodesAsString(n) }
|
||||||
|
func (n Include) String() string { return orgWriter.nodesAsString(n) }
|
114
vendor/github.com/niklasfasching/go-org/org/list.go
generated
vendored
Normal file
114
vendor/github.com/niklasfasching/go-org/org/list.go
generated
vendored
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
package org
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
type List struct {
|
||||||
|
Kind string
|
||||||
|
Items []Node
|
||||||
|
}
|
||||||
|
|
||||||
|
type ListItem struct {
|
||||||
|
Bullet string
|
||||||
|
Status string
|
||||||
|
Children []Node
|
||||||
|
}
|
||||||
|
|
||||||
|
type DescriptiveListItem struct {
|
||||||
|
Bullet string
|
||||||
|
Status string
|
||||||
|
Term []Node
|
||||||
|
Details []Node
|
||||||
|
}
|
||||||
|
|
||||||
|
var unorderedListRegexp = regexp.MustCompile(`^(\s*)([+*-])(\s+(.*)|$)`)
|
||||||
|
var orderedListRegexp = regexp.MustCompile(`^(\s*)(([0-9]+|[a-zA-Z])[.)])(\s+(.*)|$)`)
|
||||||
|
var descriptiveListItemRegexp = regexp.MustCompile(`\s::(\s|$)`)
|
||||||
|
var listItemStatusRegexp = regexp.MustCompile(`\[( |X|-)\]\s`)
|
||||||
|
|
||||||
|
func lexList(line string) (token, bool) {
|
||||||
|
if m := unorderedListRegexp.FindStringSubmatch(line); m != nil {
|
||||||
|
return token{"unorderedList", len(m[1]), m[4], m}, true
|
||||||
|
} else if m := orderedListRegexp.FindStringSubmatch(line); m != nil {
|
||||||
|
return token{"orderedList", len(m[1]), m[5], m}, true
|
||||||
|
}
|
||||||
|
return nilToken, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func isListToken(t token) bool {
|
||||||
|
return t.kind == "unorderedList" || t.kind == "orderedList"
|
||||||
|
}
|
||||||
|
|
||||||
|
func listKind(t token) (string, string) {
|
||||||
|
kind := ""
|
||||||
|
switch bullet := t.matches[2]; {
|
||||||
|
case bullet == "*" || bullet == "+" || bullet == "-":
|
||||||
|
kind = "unordered"
|
||||||
|
case unicode.IsLetter(rune(bullet[0])), unicode.IsDigit(rune(bullet[0])):
|
||||||
|
kind = "ordered"
|
||||||
|
default:
|
||||||
|
panic(fmt.Sprintf("bad list bullet '%s': %#v", bullet, t))
|
||||||
|
}
|
||||||
|
if descriptiveListItemRegexp.MatchString(t.content) {
|
||||||
|
return kind, "descriptive"
|
||||||
|
}
|
||||||
|
return kind, kind
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) parseList(i int, parentStop stopFn) (int, Node) {
|
||||||
|
start, lvl := i, d.tokens[i].lvl
|
||||||
|
listMainKind, kind := listKind(d.tokens[i])
|
||||||
|
list := List{Kind: kind}
|
||||||
|
stop := func(*Document, int) bool {
|
||||||
|
if parentStop(d, i) || d.tokens[i].lvl != lvl || !isListToken(d.tokens[i]) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
itemMainKind, _ := listKind(d.tokens[i])
|
||||||
|
return itemMainKind != listMainKind
|
||||||
|
}
|
||||||
|
for !stop(d, i) {
|
||||||
|
consumed, node := d.parseListItem(list, i, parentStop)
|
||||||
|
i += consumed
|
||||||
|
list.Items = append(list.Items, node)
|
||||||
|
}
|
||||||
|
return i - start, list
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) parseListItem(l List, i int, parentStop stopFn) (int, Node) {
|
||||||
|
start, nodes, bullet := i, []Node{}, d.tokens[i].matches[2]
|
||||||
|
minIndent, dterm, content, status := d.tokens[i].lvl+len(bullet), "", d.tokens[i].content, ""
|
||||||
|
if m := listItemStatusRegexp.FindStringSubmatch(content); m != nil {
|
||||||
|
status, content = m[1], content[len("[ ] "):]
|
||||||
|
}
|
||||||
|
if l.Kind == "descriptive" {
|
||||||
|
if m := descriptiveListItemRegexp.FindStringIndex(content); m != nil {
|
||||||
|
dterm, content = content[:m[0]], content[m[1]:]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
d.tokens[i] = tokenize(strings.Repeat(" ", minIndent) + content)
|
||||||
|
stop := func(d *Document, i int) bool {
|
||||||
|
if parentStop(d, i) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
t := d.tokens[i]
|
||||||
|
return t.lvl < minIndent && !(t.kind == "text" && t.content == "")
|
||||||
|
}
|
||||||
|
for !stop(d, i) && (i <= start+1 || !isSecondBlankLine(d, i)) {
|
||||||
|
consumed, node := d.parseOne(i, stop)
|
||||||
|
i += consumed
|
||||||
|
nodes = append(nodes, node)
|
||||||
|
}
|
||||||
|
if l.Kind == "descriptive" {
|
||||||
|
return i - start, DescriptiveListItem{bullet, status, d.parseInline(dterm), nodes}
|
||||||
|
}
|
||||||
|
return i - start, ListItem{bullet, status, nodes}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n List) String() string { return orgWriter.nodesAsString(n) }
|
||||||
|
func (n ListItem) String() string { return orgWriter.nodesAsString(n) }
|
||||||
|
func (n DescriptiveListItem) String() string { return orgWriter.nodesAsString(n) }
|
334
vendor/github.com/niklasfasching/go-org/org/org_writer.go
generated
vendored
Normal file
334
vendor/github.com/niklasfasching/go-org/org/org_writer.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
46
vendor/github.com/niklasfasching/go-org/org/paragraph.go
generated
vendored
Normal file
46
vendor/github.com/niklasfasching/go-org/org/paragraph.go
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
package org
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Paragraph struct{ Children []Node }
|
||||||
|
type HorizontalRule struct{}
|
||||||
|
|
||||||
|
var horizontalRuleRegexp = regexp.MustCompile(`^(\s*)-{5,}\s*$`)
|
||||||
|
var plainTextRegexp = regexp.MustCompile(`^(\s*)(.*)`)
|
||||||
|
|
||||||
|
func lexText(line string) (token, bool) {
|
||||||
|
if m := plainTextRegexp.FindStringSubmatch(line); m != nil {
|
||||||
|
return token{"text", len(m[1]), m[2], m}, true
|
||||||
|
}
|
||||||
|
return nilToken, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func lexHorizontalRule(line string) (token, bool) {
|
||||||
|
if m := horizontalRuleRegexp.FindStringSubmatch(line); m != nil {
|
||||||
|
return token{"horizontalRule", len(m[1]), "", m}, true
|
||||||
|
}
|
||||||
|
return nilToken, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) parseParagraph(i int, parentStop stopFn) (int, Node) {
|
||||||
|
lines, start := []string{d.tokens[i].content}, i
|
||||||
|
i++
|
||||||
|
stop := func(d *Document, i int) bool {
|
||||||
|
return parentStop(d, i) || d.tokens[i].kind != "text" || d.tokens[i].content == ""
|
||||||
|
}
|
||||||
|
for ; !stop(d, i); i++ {
|
||||||
|
lines = append(lines, d.tokens[i].content)
|
||||||
|
}
|
||||||
|
consumed := i - start
|
||||||
|
return consumed, Paragraph{d.parseInline(strings.Join(lines, "\n"))}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) parseHorizontalRule(i int, parentStop stopFn) (int, Node) {
|
||||||
|
return 1, HorizontalRule{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n Paragraph) String() string { return orgWriter.nodesAsString(n) }
|
||||||
|
func (n HorizontalRule) String() string { return orgWriter.nodesAsString(n) }
|
130
vendor/github.com/niklasfasching/go-org/org/table.go
generated
vendored
Normal file
130
vendor/github.com/niklasfasching/go-org/org/table.go
generated
vendored
Normal file
@ -0,0 +1,130 @@
|
|||||||
|
package org
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"unicode/utf8"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Table struct {
|
||||||
|
Rows []Row
|
||||||
|
ColumnInfos []ColumnInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
type Row struct {
|
||||||
|
Columns []Column
|
||||||
|
IsSpecial bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type Column struct {
|
||||||
|
Children []Node
|
||||||
|
*ColumnInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
type ColumnInfo struct {
|
||||||
|
Align string
|
||||||
|
Len int
|
||||||
|
}
|
||||||
|
|
||||||
|
var tableSeparatorRegexp = regexp.MustCompile(`^(\s*)(\|[+-|]*)\s*$`)
|
||||||
|
var tableRowRegexp = regexp.MustCompile(`^(\s*)(\|.*)`)
|
||||||
|
|
||||||
|
var columnAlignRegexp = regexp.MustCompile(`^<(l|c|r)>$`)
|
||||||
|
|
||||||
|
func lexTable(line string) (token, bool) {
|
||||||
|
if m := tableSeparatorRegexp.FindStringSubmatch(line); m != nil {
|
||||||
|
return token{"tableSeparator", len(m[1]), m[2], m}, true
|
||||||
|
} else if m := tableRowRegexp.FindStringSubmatch(line); m != nil {
|
||||||
|
return token{"tableRow", len(m[1]), m[2], m}, true
|
||||||
|
}
|
||||||
|
return nilToken, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Document) parseTable(i int, parentStop stopFn) (int, Node) {
|
||||||
|
rawRows, start := [][]string{}, i
|
||||||
|
for ; !parentStop(d, i); i++ {
|
||||||
|
if t := d.tokens[i]; t.kind == "tableRow" {
|
||||||
|
rawRow := strings.FieldsFunc(d.tokens[i].content, func(r rune) bool { return r == '|' })
|
||||||
|
for i := range rawRow {
|
||||||
|
rawRow[i] = strings.TrimSpace(rawRow[i])
|
||||||
|
}
|
||||||
|
rawRows = append(rawRows, rawRow)
|
||||||
|
} else if t.kind == "tableSeparator" {
|
||||||
|
rawRows = append(rawRows, nil)
|
||||||
|
} else {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
table := Table{nil, getColumnInfos(rawRows)}
|
||||||
|
for _, rawColumns := range rawRows {
|
||||||
|
row := Row{nil, isSpecialRow(rawColumns)}
|
||||||
|
if len(rawColumns) != 0 {
|
||||||
|
for i := range table.ColumnInfos {
|
||||||
|
column := Column{nil, &table.ColumnInfos[i]}
|
||||||
|
if i < len(rawColumns) {
|
||||||
|
column.Children = d.parseInline(rawColumns[i])
|
||||||
|
}
|
||||||
|
row.Columns = append(row.Columns, column)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
table.Rows = append(table.Rows, row)
|
||||||
|
}
|
||||||
|
return i - start, table
|
||||||
|
}
|
||||||
|
|
||||||
|
func getColumnInfos(rows [][]string) []ColumnInfo {
|
||||||
|
columnCount := 0
|
||||||
|
for _, columns := range rows {
|
||||||
|
if n := len(columns); n > columnCount {
|
||||||
|
columnCount = n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
columnInfos := make([]ColumnInfo, columnCount)
|
||||||
|
for i := 0; i < columnCount; i++ {
|
||||||
|
countNumeric, countNonNumeric := 0, 0
|
||||||
|
for _, columns := range rows {
|
||||||
|
if i >= len(columns) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if n := utf8.RuneCountInString(columns[i]); n > columnInfos[i].Len {
|
||||||
|
columnInfos[i].Len = n
|
||||||
|
}
|
||||||
|
|
||||||
|
if m := columnAlignRegexp.FindStringSubmatch(columns[i]); m != nil && isSpecialRow(columns) {
|
||||||
|
switch m[1] {
|
||||||
|
case "l":
|
||||||
|
columnInfos[i].Align = "left"
|
||||||
|
case "c":
|
||||||
|
columnInfos[i].Align = "center"
|
||||||
|
case "r":
|
||||||
|
columnInfos[i].Align = "right"
|
||||||
|
}
|
||||||
|
} else if _, err := strconv.ParseFloat(columns[i], 32); err == nil {
|
||||||
|
countNumeric++
|
||||||
|
} else if strings.TrimSpace(columns[i]) != "" {
|
||||||
|
countNonNumeric++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if columnInfos[i].Align == "" && countNumeric >= countNonNumeric {
|
||||||
|
columnInfos[i].Align = "right"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return columnInfos
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSpecialRow(rawColumns []string) bool {
|
||||||
|
isAlignRow := true
|
||||||
|
for _, rawColumn := range rawColumns {
|
||||||
|
if !columnAlignRegexp.MatchString(rawColumn) && rawColumn != "" {
|
||||||
|
isAlignRow = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return isAlignRow
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n Table) String() string { return orgWriter.nodesAsString(n) }
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user