mirror of https://github.com/kubernetes/kops.git

commit fe3671fff5 (parent cdbb780534)

    go.mod deps for feat toolbox instance-selector
@@ -90,6 +90,8 @@ go_library(
         "//util/pkg/text:go_default_library",
         "//util/pkg/ui:go_default_library",
         "//util/pkg/vfs:go_default_library",
+        "//vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli:go_default_library",
+        "//vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector:go_default_library",
         "//vendor/github.com/blang/semver/v4:go_default_library",
         "//vendor/github.com/ghodss/yaml:go_default_library",
         "//vendor/github.com/spf13/cobra:go_default_library",

@@ -158,6 +160,7 @@ go_test(
         "//upup/pkg/fi/cloudup/awsup:go_default_library",
         "//upup/pkg/fi/cloudup/gce:go_default_library",
         "//util/pkg/ui:go_default_library",
+        "//vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli:go_default_library",
         "//vendor/github.com/aws/aws-sdk-go/aws:go_default_library",
         "//vendor/github.com/aws/aws-sdk-go/service/ec2:go_default_library",
         "//vendor/github.com/ghodss/yaml:go_default_library",
go.mod (3 changes)

@@ -56,6 +56,7 @@ require (
     github.com/Microsoft/go-winio v0.4.14 // indirect
     github.com/aliyun/alibaba-cloud-sdk-go v1.61.264
     github.com/aokoli/goutils v1.0.1 // indirect
+    github.com/aws/amazon-ec2-instance-selector/v2 v2.0.1
     github.com/aws/aws-sdk-go v1.33.13
     github.com/bazelbuild/bazel-gazelle v0.19.1
     github.com/blang/semver/v4 v4.0.0

@@ -88,7 +89,7 @@ require (
     github.com/pkg/sftp v0.0.0-20160930220758-4d0e916071f6
     github.com/prometheus/client_golang v1.0.0
     github.com/sergi/go-diff v1.0.0
-    github.com/spf13/cobra v0.0.5
+    github.com/spf13/cobra v0.0.7
     github.com/spf13/pflag v1.0.5
     github.com/spf13/viper v1.4.0
     github.com/spotinst/spotinst-sdk-go v1.56.0
go.sum (13 changes)

@@ -75,8 +75,11 @@ github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj
 github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
 github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
 github.com/auth0/go-jwt-middleware v0.0.0-20170425171159-5493cabe49f7/go.mod h1:LWMyo4iOLWXHGdBki7NIht1kHru/0wM179h+d3g8ATM=
+github.com/aws/amazon-ec2-instance-selector/v2 v2.0.1 h1:wqSb1757J+cwNAj0bQaknVVQjLbJ/nngxUeOCIlyrhc=
+github.com/aws/amazon-ec2-instance-selector/v2 v2.0.1/go.mod h1:juzWzDAyOHbufrohZ77l2VW+CEgtXIm2SzrTVMmfJy8=
 github.com/aws/aws-sdk-go v1.16.26/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
 github.com/aws/aws-sdk-go v1.28.2/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
+github.com/aws/aws-sdk-go v1.31.12/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
 github.com/aws/aws-sdk-go v1.33.13 h1:3+AsCrxxnhiUQEhWV+j3kEs7aBCIn2qkDjA+elpxYPU=
 github.com/aws/aws-sdk-go v1.33.13/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
 github.com/bazelbuild/bazel-gazelle v0.18.2/go.mod h1:D0ehMSbS+vesFsLGiD6JXu3mVEzOlfUl8wNnq+x/9p0=

@@ -148,6 +151,8 @@ github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f h1:lBNOc5arjvs8E5mO2tbp
 github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
 github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
 github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
+github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=
+github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
 github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
 github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4=
 github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=

@@ -723,6 +728,8 @@ github.com/rubiojr/go-vhd v0.0.0-20160810183302-0bfd3b39853c/go.mod h1:DM5xW0nvf
 github.com/russross/blackfriday v0.0.0-20170610170232-067529f716f4/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
 github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
 github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
+github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
+github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
 github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
 github.com/ryanuber/go-glob v0.0.0-20170128012129-256dc444b735/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc=
 github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk=

@@ -739,6 +746,8 @@ github.com/shirou/gopsutil v0.0.0-20180427012116-c95755e4bcd7/go.mod h1:5b4v6he4
 github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc=
 github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
 github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ=
+github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
+github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
 github.com/sirupsen/logrus v1.0.5/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
 github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
 github.com/sirupsen/logrus v1.2.0 h1:juTguoYk5qI21pwyTXY3B3Y5cOTH3ZUyZCg1v/mihuo=

@@ -766,6 +775,8 @@ github.com/spf13/cobra v0.0.2/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3
 github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
 github.com/spf13/cobra v0.0.5 h1:f0B+LkLX6DtmRH1isoNA9VTtNUK9K8xYd28JNNfOv/s=
 github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
+github.com/spf13/cobra v0.0.7 h1:FfTH+vuMXOas8jmfb5/M7dzEYx7LpcLb7a0LPe34uOU=
+github.com/spf13/cobra v0.0.7/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
 github.com/spf13/jwalterweatherman v0.0.0-20180109140146-7c0cea34c8ec/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
 github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
 github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=

@@ -1094,6 +1105,8 @@ gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
 gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
 gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=
 gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
+gopkg.in/ini.v1 v1.57.0 h1:9unxIsFcTt4I55uWluz+UmL95q4kdJ0buvQ1ZIqVQww=
+gopkg.in/ini.v1 v1.57.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
 gopkg.in/mcuadros/go-syslog.v2 v2.2.1/go.mod h1:l5LPIyOOyIdQquNg+oU6Z3524YwrcqEm0aKH+5zpt2U=
 gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
 gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
@@ -0,0 +1,175 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.
@@ -0,0 +1 @@
Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity/BUILD.bazel (generated, vendored, new file, 9 lines)

@@ -0,0 +1,9 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = ["bytequantity.go"],
    importmap = "k8s.io/kops/vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity",
    importpath = "github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity",
    visibility = ["//visibility:public"],
)
vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity/bytequantity.go (generated, vendored, new file, 153 lines)

@@ -0,0 +1,153 @@
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"). You may
// not use this file except in compliance with the License. A copy of the
// License is located at
//
//     http://aws.amazon.com/apache2.0/
//
// or in the "license" file accompanying this file. This file is distributed
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
// express or implied. See the License for the specific language governing
// permissions and limitations under the License.

package bytequantity

import (
    "fmt"
    "math"
    "regexp"
    "strconv"
    "strings"
)

const (
    /// Examples: 1mb, 1 gb, 1.0tb, 1mib, 2g, 2.001 t
    byteQuantityRegex = `^([0-9]+\.?[0-9]{0,3})[ ]?(mi?b?|gi?b?|ti?b?)?$`
    mib               = "MiB"
    gib               = "GiB"
    tib               = "TiB"
    gbConvert         = 1 << 10
    tbConvert         = gbConvert << 10
    maxGiB            = math.MaxUint64 / gbConvert
    maxTiB            = math.MaxUint64 / tbConvert
)

// ByteQuantity is a data type representing a byte quantity
type ByteQuantity struct {
    Quantity uint64
}

// ParseToByteQuantity parses a string representation of a byte quantity to a ByteQuantity type.
// A unit can be appended such as 16 GiB. If no unit is appended, GiB is assumed.
func ParseToByteQuantity(byteQuantityStr string) (ByteQuantity, error) {
    bqRegexp := regexp.MustCompile(byteQuantityRegex)
    matches := bqRegexp.FindStringSubmatch(strings.ToLower(byteQuantityStr))
    if len(matches) < 2 {
        return ByteQuantity{}, fmt.Errorf("%s is not a valid byte quantity", byteQuantityStr)
    }

    quantityStr := matches[1]
    unit := gib
    if len(matches) > 2 && matches[2] != "" {
        unit = matches[2]
    }
    quantity := uint64(0)
    switch strings.ToLower(string(unit[0])) {
    //mib
    case "m":
        inputDecSplit := strings.Split(quantityStr, ".")
        if len(inputDecSplit) == 2 {
            d, err := strconv.Atoi(inputDecSplit[1])
            if err != nil {
                return ByteQuantity{}, err
            }
            if d != 0 {
                return ByteQuantity{}, fmt.Errorf("cannot accept floating point MB value, only integers are accepted")
            }
        }
        // need error here so that this quantity doesn't bind in the local scope
        var err error
        quantity, err = strconv.ParseUint(inputDecSplit[0], 10, 64)
        if err != nil {
            return ByteQuantity{}, err
        }
    //gib
    case "g":
        quantityDec, err := strconv.ParseFloat(quantityStr, 10)
        if err != nil {
            return ByteQuantity{}, err
        }
        if quantityDec > maxGiB {
            return ByteQuantity{}, fmt.Errorf("error GiB value is too large")
        }
        quantity = uint64(quantityDec * gbConvert)
    //tib
    case "t":
        quantityDec, err := strconv.ParseFloat(quantityStr, 10)
        if err != nil {
            return ByteQuantity{}, err
        }
        if quantityDec > maxTiB {
            return ByteQuantity{}, fmt.Errorf("error TiB value is too large")
        }
        quantity = uint64(quantityDec * tbConvert)
    default:
        return ByteQuantity{}, fmt.Errorf("error unit %s is not supported", unit)
    }

    return ByteQuantity{
        Quantity: quantity,
    }, nil
}

// FromTiB returns a byte quantity of the passed in tebibytes quantity
func FromTiB(tib uint64) ByteQuantity {
    return ByteQuantity{
        Quantity: tib * tbConvert,
    }
}

// FromGiB returns a byte quantity of the passed in gibibytes quantity
func FromGiB(gib uint64) ByteQuantity {
    return ByteQuantity{
        Quantity: gib * gbConvert,
    }
}

// FromMiB returns a byte quantity of the passed in mebibytes quantity
func FromMiB(mib uint64) ByteQuantity {
    return ByteQuantity{
        Quantity: mib,
    }
}

// StringMiB returns a byte quantity in a mebibytes string representation
func (bq ByteQuantity) StringMiB() string {
    return fmt.Sprintf("%.0f %s", bq.MiB(), mib)
}

// StringGiB returns a byte quantity in a gibibytes string representation
func (bq ByteQuantity) StringGiB() string {
    return fmt.Sprintf("%.3f %s", bq.GiB(), gib)
}

// StringTiB returns a byte quantity in a tebibytes string representation
func (bq ByteQuantity) StringTiB() string {
    return fmt.Sprintf("%.3f %s", bq.TiB(), tib)
}

// MiB returns a byte quantity in mebibytes
func (bq ByteQuantity) MiB() float64 {
    return float64(bq.Quantity)
}

// GiB returns a byte quantity in gibibytes
func (bq ByteQuantity) GiB() float64 {
    return float64(bq.Quantity) * 1 / gbConvert
}

// TiB returns a byte quantity in tebibytes
func (bq ByteQuantity) TiB() float64 {
    return float64(bq.Quantity) * 1 / tbConvert
}
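Note: the ParseToByteQuantity doc comment above says GiB is assumed when no unit is appended, and the "m" branch rejects fractional MiB values. A minimal usage sketch, not part of this commit, showing how Go code in this repository could call the vendored package:

package main

import (
    "fmt"
    "log"

    "github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity"
)

func main() {
    // "2.001 t" matches byteQuantityRegex above: fractional TiB values are allowed.
    bq, err := bytequantity.ParseToByteQuantity("2.001 t")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(bq.StringGiB()) // quantity is stored in MiB internally, printed here as GiB

    // No unit appended, so GiB is assumed per the doc comment.
    sixteen, _ := bytequantity.ParseToByteQuantity("16")
    fmt.Println(sixteen.StringMiB()) // "16384 MiB"
}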
vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli/BUILD.bazel (generated, vendored, new file, 19 lines)

@@ -0,0 +1,19 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = [
        "cli.go",
        "flags.go",
        "types.go",
    ],
    importmap = "k8s.io/kops/vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli",
    importpath = "github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli",
    visibility = ["//visibility:public"],
    deps = [
        "//vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity:go_default_library",
        "//vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector:go_default_library",
        "//vendor/github.com/spf13/cobra:go_default_library",
        "//vendor/github.com/spf13/pflag:go_default_library",
    ],
)
vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli/cli.go (generated, vendored, new file, 262 lines)

@@ -0,0 +1,262 @@
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"). You may
// not use this file except in compliance with the License. A copy of the
// License is located at
//
//     http://aws.amazon.com/apache2.0/
//
// or in the "license" file accompanying this file. This file is distributed
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
// express or implied. See the License for the specific language governing
// permissions and limitations under the License.

// Package cli provides functions to build the selector command line interface
package cli

import (
    "fmt"
    "os"
    "reflect"
    "strings"

    "github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity"
    "github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector"
    "github.com/spf13/cobra"
    "github.com/spf13/pflag"
)

type runFunc = func(cmd *cobra.Command, args []string)

// New creates an instance of CommandLineInterface
func New(binaryName string, shortUsage string, longUsage, examples string, run runFunc) CommandLineInterface {
    cmd := &cobra.Command{
        Use:     binaryName,
        Short:   shortUsage,
        Long:    longUsage,
        Example: examples,
        Run:     run,
    }
    return CommandLineInterface{
        Command:     cmd,
        Flags:       map[string]interface{}{},
        nilDefaults: map[string]bool{},
        rangeFlags:  map[string]bool{},
        validators:  map[string]validator{},
        processors:  map[string]processor{},
        suiteFlags:  pflag.NewFlagSet("suite", pflag.ExitOnError),
    }
}

// ParseFlags will parse flags registered in this instance of CLI from os.Args
func (cl *CommandLineInterface) ParseFlags() (map[string]interface{}, error) {
    cl.setUsageTemplate()
    // Remove Suite Flags so that args only include Config and Filter Flags
    cl.Command.SetArgs(removeIntersectingArgs(cl.suiteFlags))
    // This parses Config and Filter flags only
    if err := cl.Command.Execute(); err != nil {
        return nil, err
    }

    // Remove Config and Filter flags so that only suite flags are parsed
    if err := cl.suiteFlags.Parse(removeIntersectingArgs(cl.Command.Flags())); err != nil {
        return nil, err
    }

    // Add suite flags to Command flagset so that other processing can occur
    // This has to be done after usage is printed so that the flagsets can be grouped properly when printed
    cl.Command.Flags().AddFlagSet(cl.suiteFlags)
    if err := cl.SetUntouchedFlagValuesToNil(); err != nil {
        return nil, err
    }

    if err := cl.ProcessFlags(); err != nil {
        return nil, err
    }
    return cl.Flags, nil
}

// ParseAndValidateFlags will parse flags registered in this instance of CLI from os.Args
// and then perform validation
func (cl *CommandLineInterface) ParseAndValidateFlags() (map[string]interface{}, error) {
    flags, err := cl.ParseFlags()
    if err != nil {
        return nil, err
    }
    if err := cl.ValidateFlags(); err != nil {
        return nil, err
    }
    return flags, nil
}

// ProcessFlags iterates through any registered processors and executes them
// Processors are executed before validators
func (cl *CommandLineInterface) ProcessFlags() error {
    for flagName, processorFn := range cl.processors {
        if processorFn == nil {
            continue
        }
        if err := processorFn(cl.Flags[flagName]); err != nil {
            return err
        }
    }
    if err := cl.ProcessRangeFilterFlags(); err != nil {
        return err
    }
    return nil
}

// ValidateFlags iterates through any registered validators and executes them
func (cl *CommandLineInterface) ValidateFlags() error {
    for flagName, validationFn := range cl.validators {
        if validationFn == nil {
            continue
        }
        err := validationFn(cl.Flags[flagName])
        if err != nil {
            return err
        }
    }
    return nil
}

func removeIntersectingArgs(flagSet *pflag.FlagSet) []string {
    newArgs := []string{}
    skipNext := false
    for i, arg := range os.Args {
        if skipNext {
            skipNext = false
            continue
        }
        arg = strings.Split(arg, "=")[0]
        longFlag := strings.Replace(arg, "--", "", 1)
        if flagSet.Lookup(longFlag) != nil || shorthandLookup(flagSet, arg) != nil {
            if len(os.Args) > i+1 && os.Args[i+1][0] != '-' {
                skipNext = true
            }
            continue
        }
        newArgs = append(newArgs, os.Args[i])
    }
    return newArgs
}

func shorthandLookup(flagSet *pflag.FlagSet, arg string) *pflag.Flag {
    if len(arg) == 2 && arg[0] == '-' && arg[1] != '-' {
        return flagSet.ShorthandLookup(strings.Replace(arg, "-", "", 1))
    }
    return nil
}

func (cl *CommandLineInterface) setUsageTemplate() {
    transformedUsage := usageTemplate
    suiteFlagCount := 0
    cl.suiteFlags.VisitAll(func(*pflag.Flag) {
        suiteFlagCount++
    })
    if suiteFlagCount > 0 {
        transformedUsage = fmt.Sprintf(transformedUsage, "\n\nSuite Flags:\n"+cl.suiteFlags.FlagUsages()+"\n")
    } else {
        transformedUsage = fmt.Sprintf(transformedUsage, "")
    }
    cl.Command.SetUsageTemplate(transformedUsage)
    cl.suiteFlags.Usage = func() {}
    cl.Command.Flags().Usage = func() {}
}

// SetUntouchedFlagValuesToNil iterates through all flags and sets their value to nil if they were not specifically set by the user
// This allows for a specified value, a negative value (like false or empty string), or an unspecified (nil) entry.
func (cl *CommandLineInterface) SetUntouchedFlagValuesToNil() error {
    defaultHandlerErrMsg := "Unable to find a default value handler for %v, marking as no default value. This could be an error"
    defaultHandlerFlags := []string{}

    cl.Command.Flags().VisitAll(func(f *pflag.Flag) {
        if !f.Changed {
            // If nilDefaults entry for flag is set to false, do not change default
            if val, _ := cl.nilDefaults[f.Name]; !val {
                return
            }
            switch v := cl.Flags[f.Name].(type) {
            case *int:
                if reflect.ValueOf(*v).IsZero() {
                    cl.Flags[f.Name] = nil
                }
            case *bytequantity.ByteQuantity:
                if v.Quantity == 0 {
                    cl.Flags[f.Name] = nil
                }
            case *string:
                if reflect.ValueOf(*v).IsZero() {
                    cl.Flags[f.Name] = nil
                }
            case *bool:
                if reflect.ValueOf(*v).IsZero() {
                    cl.Flags[f.Name] = nil
                }
            case *[]string:
                if reflect.ValueOf(v).IsZero() {
                    cl.Flags[f.Name] = nil
                }
            default:
                defaultHandlerFlags = append(defaultHandlerFlags, f.Name)
                cl.Flags[f.Name] = nil
            }
        }
    })
    if len(defaultHandlerFlags) != 0 {
        return fmt.Errorf(defaultHandlerErrMsg, defaultHandlerFlags)
    }
    return nil
}

// ProcessRangeFilterFlags sets min and max to the appropriate 0 or max bounds based on the 3-tuple that a user specifies for base flag, min, and/or max
func (cl *CommandLineInterface) ProcessRangeFilterFlags() error {
    for flagName := range cl.rangeFlags {
        rangeHelperMin := fmt.Sprintf("%s-%s", flagName, "min")
        rangeHelperMax := fmt.Sprintf("%s-%s", flagName, "max")
        if cl.Flags[flagName] != nil {
            if cl.Flags[rangeHelperMin] != nil || cl.Flags[rangeHelperMax] != nil {
                return fmt.Errorf("error: --%s and --%s cannot be set when using --%s", rangeHelperMin, rangeHelperMax, flagName)
            }
            cl.Flags[rangeHelperMin] = cl.Flags[flagName]
            cl.Flags[rangeHelperMax] = cl.Flags[flagName]
        }
        if cl.Flags[rangeHelperMin] == nil && cl.Flags[rangeHelperMax] == nil {
            continue
        }

        if cl.Flags[rangeHelperMin] == nil {
            switch cl.Flags[rangeHelperMax].(type) {
            case *int:
                cl.Flags[rangeHelperMin] = cl.IntMe(0)
            case *bytequantity.ByteQuantity:
                cl.Flags[rangeHelperMin] = cl.ByteQuantityMe(bytequantity.ByteQuantity{Quantity: 0})
            default:
                return fmt.Errorf("Unable to set %s", rangeHelperMax)
            }
        } else if cl.Flags[rangeHelperMax] == nil {
            switch cl.Flags[rangeHelperMin].(type) {
            case *int:
                cl.Flags[rangeHelperMax] = cl.IntMe(maxInt)
            case *bytequantity.ByteQuantity:
                cl.Flags[rangeHelperMax] = cl.ByteQuantityMe(bytequantity.ByteQuantity{Quantity: maxUint64})
            default:
                return fmt.Errorf("Unable to set %s", rangeHelperMin)
            }
        }

        switch cl.Flags[rangeHelperMin].(type) {
        case *int:
            cl.Flags[flagName] = &selector.IntRangeFilter{
                LowerBound: *cl.IntMe(cl.Flags[rangeHelperMin]),
                UpperBound: *cl.IntMe(cl.Flags[rangeHelperMax]),
            }
        case *bytequantity.ByteQuantity:
            cl.Flags[flagName] = &selector.ByteQuantityRangeFilter{
                LowerBound: *cl.ByteQuantityMe(cl.Flags[rangeHelperMin]),
                UpperBound: *cl.ByteQuantityMe(cl.Flags[rangeHelperMax]),
            }
        }
    }
    return nil
}
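Note: cli.New wires a cobra.Command into a CommandLineInterface, and ParseAndValidateFlags runs parse, nil-out-untouched-flags, processors, then validators, as the functions above show. The following is a hypothetical consumer of this vendored package; the binary name, flag name, and run function are illustrative and not taken from this commit:

package main

import (
    "fmt"
    "os"

    "github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli"
    "github.com/spf13/cobra"
)

func main() {
    // New returns a CommandLineInterface wrapping a cobra.Command.
    c := cli.New("instance-picker", "pick EC2 instance types", "Longer usage text shown by --help.", "instance-picker --region us-east-2", run)

    // StringFlag registers --region on the command's flag set; a nil default
    // means the flag value is set to nil when the user does not pass it.
    c.StringFlag("region", nil, nil, "AWS region to query", nil)

    // ParseAndValidateFlags parses os.Args, nils out untouched flags,
    // runs registered processors, then registered validators.
    flags, err := c.ParseAndValidateFlags()
    if err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
    if region, ok := flags["region"].(*string); ok && region != nil {
        fmt.Println("region:", *region)
    }
}

func run(cmd *cobra.Command, args []string) {}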
341
vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli/flags.go
generated
vendored
Normal file
341
vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli/flags.go
generated
vendored
Normal file
|
|
@ -0,0 +1,341 @@
|
||||||
|
package cli
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity"
|
||||||
|
"github.com/spf13/pflag"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
maxInt = int(^uint(0) >> 1)
|
||||||
|
maxUint64 = math.MaxUint64
|
||||||
|
)
|
||||||
|
|
||||||
|
// RatioFlag creates and registers a flag accepting a Ratio
|
||||||
|
func (cl *CommandLineInterface) RatioFlag(name string, shorthand *string, defaultValue *string, description string) error {
|
||||||
|
if defaultValue == nil {
|
||||||
|
cl.nilDefaults[name] = true
|
||||||
|
defaultValue = cl.StringMe("")
|
||||||
|
}
|
||||||
|
if shorthand != nil {
|
||||||
|
cl.Flags[name] = cl.Command.Flags().StringP(name, string(*shorthand), *defaultValue, description)
|
||||||
|
} else {
|
||||||
|
cl.Flags[name] = cl.Command.Flags().String(name, *defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
cl.validators[name] = func(val interface{}) error {
|
||||||
|
if val == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
vcpuToMemRatioVal := *val.(*string)
|
||||||
|
valid, err := regexp.Match(`^[0-9]+:[0-9]+$`, []byte(vcpuToMemRatioVal))
|
||||||
|
if err != nil || !valid {
|
||||||
|
return fmt.Errorf("Invalid input for --%s. A valid example is 1:2", name)
|
||||||
|
}
|
||||||
|
vals := strings.Split(vcpuToMemRatioVal, ":")
|
||||||
|
vcpusRatioVal, err1 := strconv.Atoi(vals[0])
|
||||||
|
memRatioVal, err2 := strconv.Atoi(vals[1])
|
||||||
|
if err1 != nil || err2 != nil {
|
||||||
|
return fmt.Errorf("Invalid input for --%s. Ratio values must be integers. A valid example is 1:2", name)
|
||||||
|
}
|
||||||
|
cl.Flags[name] = cl.Float64Me(float64(memRatioVal) / float64(vcpusRatioVal))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IntMinMaxRangeFlags creates and registers a min, max, and helper flag each accepting an Integer
|
||||||
|
func (cl *CommandLineInterface) IntMinMaxRangeFlags(name string, shorthand *string, defaultValue *int, description string) {
|
||||||
|
cl.IntMinMaxRangeFlagOnFlagSet(cl.Command.Flags(), name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByteQuantityMinMaxRangeFlags creates and registers a min, max, and helper flag each accepting a byte quantity like 512mb
|
||||||
|
func (cl *CommandLineInterface) ByteQuantityMinMaxRangeFlags(name string, shorthand *string, defaultValue *bytequantity.ByteQuantity, description string) {
|
||||||
|
cl.ByteQuantityMinMaxRangeFlagOnFlagSet(cl.Command.Flags(), name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByteQuantityFlag creates and registers a flag accepting a byte quantity like 512mb
|
||||||
|
func (cl *CommandLineInterface) ByteQuantityFlag(name string, shorthand *string, defaultValue *bytequantity.ByteQuantity, description string) {
|
||||||
|
cl.ByteQuantityFlagOnFlagSet(cl.Command.Flags(), name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IntFlag creates and registers a flag accepting an Integer
|
||||||
|
func (cl *CommandLineInterface) IntFlag(name string, shorthand *string, defaultValue *int, description string) {
|
||||||
|
cl.IntFlagOnFlagSet(cl.Command.Flags(), name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringFlag creates and registers a flag accepting a String and a validator function.
|
||||||
|
// The validator function is provided so that more complex flags can be created from a string input.
|
||||||
|
func (cl *CommandLineInterface) StringFlag(name string, shorthand *string, defaultValue *string, description string, validationFn validator) {
|
||||||
|
cl.StringFlagOnFlagSet(cl.Command.Flags(), name, shorthand, defaultValue, description, nil, validationFn)
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringSliceFlag creates and registers a flag accepting a list of strings.
|
||||||
|
func (cl *CommandLineInterface) StringSliceFlag(name string, shorthand *string, defaultValue []string, description string) {
|
||||||
|
cl.StringSliceFlagOnFlagSet(cl.Command.Flags(), name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// RegexFlag creates and registers a flag accepting a string and validates that it is a valid regex.
|
||||||
|
func (cl *CommandLineInterface) RegexFlag(name string, shorthand *string, defaultValue *string, description string) {
|
||||||
|
cl.RegexFlagOnFlagSet(cl.Command.Flags(), name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringOptionsFlag creates and registers a flag accepting a string and valid options for use in validation.
|
||||||
|
func (cl *CommandLineInterface) StringOptionsFlag(name string, shorthand *string, defaultValue *string, description string, validOpts []string) {
|
||||||
|
cl.StringOptionsFlagOnFlagSet(cl.Command.Flags(), name, shorthand, defaultValue, description, validOpts)
|
||||||
|
}
|
||||||
|
|
||||||
|
// BoolFlag creates and registers a flag accepting a boolean
|
||||||
|
func (cl *CommandLineInterface) BoolFlag(name string, shorthand *string, defaultValue *bool, description string) {
|
||||||
|
cl.BoolFlagOnFlagSet(cl.Command.Flags(), name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConfigStringFlag creates and registers a flag accepting a String for configuration purposes.
|
||||||
|
// Config flags will be grouped at the bottom in the output of --help
|
||||||
|
func (cl *CommandLineInterface) ConfigStringFlag(name string, shorthand *string, defaultValue *string, description string, validationFn validator) {
|
||||||
|
cl.StringFlagOnFlagSet(cl.Command.PersistentFlags(), name, shorthand, defaultValue, description, nil, validationFn)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConfigStringSliceFlag creates and registers a flag accepting a list of strings.
|
||||||
|
// Suite flags will be grouped in the middle of the output --help
|
||||||
|
func (cl *CommandLineInterface) ConfigStringSliceFlag(name string, shorthand *string, defaultValue []string, description string) {
|
||||||
|
cl.StringSliceFlagOnFlagSet(cl.Command.PersistentFlags(), name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConfigIntFlag creates and registers a flag accepting an Integer for configuration purposes.
|
||||||
|
// Config flags will be grouped at the bottom in the output of --help
|
||||||
|
func (cl *CommandLineInterface) ConfigIntFlag(name string, shorthand *string, defaultValue *int, description string) {
|
||||||
|
cl.IntFlagOnFlagSet(cl.Command.PersistentFlags(), name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConfigBoolFlag creates and registers a flag accepting a boolean for configuration purposes.
|
||||||
|
// Config flags will be grouped at the bottom in the output of --help
|
||||||
|
func (cl *CommandLineInterface) ConfigBoolFlag(name string, shorthand *string, defaultValue *bool, description string) {
|
||||||
|
cl.BoolFlagOnFlagSet(cl.Command.PersistentFlags(), name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConfigStringOptionsFlag creates and registers a flag accepting a string and valid options for use in validation.
|
||||||
|
// Config flags will be grouped at the bottom in the output of --help
|
||||||
|
func (cl *CommandLineInterface) ConfigStringOptionsFlag(name string, shorthand *string, defaultValue *string, description string, validOpts []string) {
|
||||||
|
cl.StringOptionsFlagOnFlagSet(cl.Command.PersistentFlags(), name, shorthand, defaultValue, description, validOpts)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SuiteBoolFlag creates and registers a flag accepting a boolean for aggregate filters.
|
||||||
|
// Suite flags will be grouped in the middle of the output --help
|
||||||
|
func (cl *CommandLineInterface) SuiteBoolFlag(name string, shorthand *string, defaultValue *bool, description string) {
|
||||||
|
cl.BoolFlagOnFlagSet(cl.suiteFlags, name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SuiteStringFlag creates and registers a flag accepting a string for aggreagate filters.
|
||||||
|
// Suite flags will be grouped in the middle of the output --help
|
||||||
|
func (cl *CommandLineInterface) SuiteStringFlag(name string, shorthand *string, defaultValue *string, description string, validationFn validator) {
|
||||||
|
cl.StringFlagOnFlagSet(cl.suiteFlags, name, shorthand, defaultValue, description, nil, validationFn)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SuiteStringOptionsFlag creates and registers a flag accepting a string and valid options for use in validation.
|
||||||
|
// Suite flags will be grouped in the middle of the output --help
|
||||||
|
func (cl *CommandLineInterface) SuiteStringOptionsFlag(name string, shorthand *string, defaultValue *string, description string, validOpts []string) {
|
||||||
|
cl.StringOptionsFlagOnFlagSet(cl.suiteFlags, name, shorthand, defaultValue, description, validOpts)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SuiteStringSliceFlag creates and registers a flag accepting a list of strings.
|
||||||
|
// Suite flags will be grouped in the middle of the output --help
|
||||||
|
func (cl *CommandLineInterface) SuiteStringSliceFlag(name string, shorthand *string, defaultValue []string, description string) {
|
||||||
|
cl.StringSliceFlagOnFlagSet(cl.suiteFlags, name, shorthand, defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// BoolFlagOnFlagSet creates and registers a flag accepting a boolean for configuration purposes.
|
||||||
|
func (cl *CommandLineInterface) BoolFlagOnFlagSet(flagSet *pflag.FlagSet, name string, shorthand *string, defaultValue *bool, description string) {
|
||||||
|
if defaultValue == nil {
|
||||||
|
cl.nilDefaults[name] = true
|
||||||
|
defaultValue = cl.BoolMe(false)
|
||||||
|
}
|
||||||
|
if shorthand != nil {
|
||||||
|
cl.Flags[name] = flagSet.BoolP(name, string(*shorthand), *defaultValue, description)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
cl.Flags[name] = flagSet.Bool(name, *defaultValue, description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IntMinMaxRangeFlagOnFlagSet creates and registers a min, max, and helper flag each accepting an Integer
|
||||||
|
func (cl *CommandLineInterface) IntMinMaxRangeFlagOnFlagSet(flagSet *pflag.FlagSet, name string, shorthand *string, defaultValue *int, description string) {
|
||||||
|
cl.IntFlagOnFlagSet(flagSet, name, shorthand, defaultValue, fmt.Sprintf("%s (sets --%s-min and -max to the same value)", description, name))
|
||||||
|
cl.IntFlagOnFlagSet(flagSet, name+"-min", nil, nil, fmt.Sprintf("Minimum %s If --%s-max is not specified, the upper bound will be infinity", description, name))
|
||||||
|
cl.IntFlagOnFlagSet(flagSet, name+"-max", nil, nil, fmt.Sprintf("Maximum %s If --%s-min is not specified, the lower bound will be 0", description, name))
|
||||||
|
cl.validators[name] = func(val interface{}) error {
|
||||||
|
if cl.Flags[name+"-min"] == nil || cl.Flags[name+"-max"] == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
minArg := name + "-min"
|
||||||
|
maxArg := name + "-max"
|
||||||
|
minVal := cl.Flags[minArg].(*int)
|
||||||
|
maxVal := cl.Flags[maxArg].(*int)
|
||||||
|
if *minVal > *maxVal {
|
||||||
|
return fmt.Errorf("Invalid input for --%s and --%s. %s must be less than or equal to %s", minArg, maxArg, minArg, maxArg)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
cl.rangeFlags[name] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByteQuantityMinMaxRangeFlagOnFlagSet creates and registers a min, max, and helper flag each accepting a ByteQuantity like 5mb or 12gb
|
||||||
|
func (cl *CommandLineInterface) ByteQuantityMinMaxRangeFlagOnFlagSet(flagSet *pflag.FlagSet, name string, shorthand *string, defaultValue *bytequantity.ByteQuantity, description string) {
|
||||||
|
cl.ByteQuantityFlagOnFlagSet(flagSet, name, shorthand, defaultValue, fmt.Sprintf("%s (sets --%s-min and -max to the same value)", description, name))
|
||||||
|
cl.ByteQuantityFlagOnFlagSet(flagSet, name+"-min", nil, nil, fmt.Sprintf("Minimum %s If --%s-max is not specified, the upper bound will be infinity", description, name))
|
||||||
|
cl.ByteQuantityFlagOnFlagSet(flagSet, name+"-max", nil, nil, fmt.Sprintf("Maximum %s If --%s-min is not specified, the lower bound will be 0", description, name))
|
||||||
|
cl.validators[name] = func(val interface{}) error {
|
||||||
|
if cl.Flags[name+"-min"] == nil || cl.Flags[name+"-max"] == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
minArg := name + "-min"
|
||||||
|
maxArg := name + "-max"
|
||||||
|
minVal := cl.Flags[name+"-min"].(*bytequantity.ByteQuantity).MiB()
|
||||||
|
maxVal := cl.Flags[name+"-max"].(*bytequantity.ByteQuantity).MiB()
|
||||||
|
if minVal > maxVal {
|
||||||
|
return fmt.Errorf("Invalid input for --%s and --%s. %s must be less than or equal to %s", minArg, maxArg, minArg, maxArg)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
cl.rangeFlags[name] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByteQuantityFlagOnFlagSet creates and registers a flag accepting a ByteQuantity
func (cl *CommandLineInterface) ByteQuantityFlagOnFlagSet(flagSet *pflag.FlagSet, name string, shorthand *string, defaultValue *bytequantity.ByteQuantity, description string) {
	invalidInputMsg := fmt.Sprintf("Invalid input for --%s. A valid example is 16gb. ", name)
	byteQuantityProcessor := func(val interface{}) error {
		if val == nil {
			return nil
		}
		switch byteQuantityInput := val.(type) {
		case *string:
			bq, err := bytequantity.ParseToByteQuantity(*byteQuantityInput)
			if err != nil {
				return fmt.Errorf(invalidInputMsg+"Can't parse byte quantity %s.", *byteQuantityInput)
			}
			cl.Flags[name] = &bq
		case *bytequantity.ByteQuantity:
			return nil
		default:
			return fmt.Errorf(invalidInputMsg + "Input type is unsupported.")
		}
		return nil
	}
	byteQuantityValidator := func(val interface{}) error {
		if val == nil {
			return nil
		}
		switch val.(type) {
		case *bytequantity.ByteQuantity:
			return nil
		default:
			return fmt.Errorf(invalidInputMsg + "Processing failed.")
		}
	}
	var stringDefaultValue *string
	if defaultValue != nil {
		stringDefaultValue = cl.StringMe(defaultValue.StringGiB())
	} else {
		stringDefaultValue = nil
	}
	cl.StringFlagOnFlagSet(flagSet, name, shorthand, stringDefaultValue, description, byteQuantityProcessor, byteQuantityValidator)
}

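ByteQuantityFlagOnFlagSet accepts human-readable sizes such as 16gb as plain strings and converts them during processing with bytequantity.ParseToByteQuantity. A minimal sketch of that parsing step, using only the ParseToByteQuantity and MiB helpers referenced above:

package main

import (
	"fmt"
	"log"

	"github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity"
)

func main() {
	// Parse a human-readable size the same way the flag processor does.
	bq, err := bytequantity.ParseToByteQuantity("16gb")
	if err != nil {
		log.Fatalf("can't parse byte quantity: %v", err)
	}
	fmt.Printf("16gb is %v MiB\n", bq.MiB())
}
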
// IntFlagOnFlagSet creates and registers a flag accepting an Integer
func (cl *CommandLineInterface) IntFlagOnFlagSet(flagSet *pflag.FlagSet, name string, shorthand *string, defaultValue *int, description string) {
	if defaultValue == nil {
		cl.nilDefaults[name] = true
		defaultValue = cl.IntMe(0)
	}
	if shorthand != nil {
		cl.Flags[name] = flagSet.IntP(name, string(*shorthand), *defaultValue, description)
		return
	}
	cl.Flags[name] = flagSet.Int(name, *defaultValue, description)
}

// StringFlagOnFlagSet creates and registers a flag accepting a String and a validator function.
// The validator function is provided so that more complex flags can be created from a string input.
func (cl *CommandLineInterface) StringFlagOnFlagSet(flagSet *pflag.FlagSet, name string, shorthand *string, defaultValue *string, description string, processorFn processor, validationFn validator) {
	if defaultValue == nil {
		cl.nilDefaults[name] = true
		defaultValue = cl.StringMe("")
	}
	if shorthand != nil {
		cl.Flags[name] = flagSet.StringP(name, string(*shorthand), *defaultValue, description)
	} else {
		cl.Flags[name] = flagSet.String(name, *defaultValue, description)
	}
	cl.processors[name] = processorFn
	cl.validators[name] = validationFn
}

// StringOptionsFlagOnFlagSet creates and registers a flag accepting a String with valid options.
// The validOpts slice of strings will be used to perform validation
func (cl *CommandLineInterface) StringOptionsFlagOnFlagSet(flagSet *pflag.FlagSet, name string, shorthand *string, defaultValue *string, description string, validOpts []string) {
	validationFn := func(val interface{}) error {
		if val == nil {
			return nil
		}
		for _, v := range validOpts {
			if v == *val.(*string) {
				return nil
			}
		}
		return fmt.Errorf("error %s must be one of: %s", name, strings.Join(validOpts, ", "))
	}
	cl.StringFlagOnFlagSet(flagSet, name, shorthand, defaultValue, description, nil, validationFn)
}

// StringSliceFlagOnFlagSet creates and registers a flag accepting a String Slice.
func (cl *CommandLineInterface) StringSliceFlagOnFlagSet(flagSet *pflag.FlagSet, name string, shorthand *string, defaultValue []string, description string) {
	if defaultValue == nil {
		cl.nilDefaults[name] = true
		defaultValue = []string{}
	}
	if shorthand != nil {
		cl.Flags[name] = flagSet.StringSliceP(name, string(*shorthand), defaultValue, description)
		return
	}
	cl.Flags[name] = flagSet.StringSlice(name, defaultValue, description)
}

// RegexFlagOnFlagSet creates and registers a flag accepting a string containing a regular expression.
func (cl *CommandLineInterface) RegexFlagOnFlagSet(flagSet *pflag.FlagSet, name string, shorthand *string, defaultValue *string, description string) {
	invalidInputMsg := fmt.Sprintf("Invalid regex input for --%s. ", name)
	regexProcessor := func(val interface{}) error {
		if val == nil {
			return nil
		}
		switch v := val.(type) {
		case *string:
			regexVal, err := regexp.Compile(*v)
			if err != nil {
				return fmt.Errorf(invalidInputMsg + "Unable to compile the regex.")
			}
			cl.Flags[name] = regexVal
		case *regexp.Regexp:
			return nil
		default:
			return fmt.Errorf(invalidInputMsg + "Input type is unsupported.")
		}

		return nil
	}
	regexValidator := func(val interface{}) error {
		if val == nil {
			return nil
		}
		switch val.(type) {
		case *regexp.Regexp:
			return nil
		default:
			return fmt.Errorf(invalidInputMsg + "Processing failed.")
		}
	}
	cl.StringFlagOnFlagSet(flagSet, name, shorthand, defaultValue, description, regexProcessor, regexValidator)
}
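The compiled regular expression stored by RegexFlagOnFlagSet is later used by the selector's allow/deny list filters to match instance type names. A small illustrative sketch of that matching step, using only the standard library regexp package (the pattern and instance type names are made up for the example):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Illustrative allow-list pattern; any valid Go regexp works here.
	allow := regexp.MustCompile(`^m5\..*$|^c5\..*$`)
	for _, it := range []string{"m5.large", "c5.xlarge", "t3.micro"} {
		fmt.Printf("%-10s allowed=%t\n", it, allow.MatchString(it))
	}
}
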
225 vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli/types.go generated vendored Normal file
@@ -0,0 +1,225 @@
|
||||||
|
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License"). You may
|
||||||
|
// not use this file except in compliance with the License. A copy of the
|
||||||
|
// License is located at
|
||||||
|
//
|
||||||
|
// http://aws.amazon.com/apache2.0/
|
||||||
|
//
|
||||||
|
// or in the "license" file accompanying this file. This file is distributed
|
||||||
|
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||||
|
// express or implied. See the License for the specific language governing
|
||||||
|
// permissions and limitations under the License.
|
||||||
|
|
||||||
|
// Package cli provides functions to build the selector command line interface
|
||||||
|
package cli
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
"regexp"
|
||||||
|
|
||||||
|
"github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity"
|
||||||
|
"github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
"github.com/spf13/pflag"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// Usage Template to run on --help
|
||||||
|
usageTemplate = `Usage:{{if .Runnable}}
|
||||||
|
{{.UseLine}}{{end}}{{if .HasAvailableSubCommands}}
|
||||||
|
{{.CommandPath}} [command]{{end}}{{if gt (len .Aliases) 0}}
|
||||||
|
|
||||||
|
Aliases:
|
||||||
|
{{.NameAndAliases}}{{end}}{{if .HasExample}}
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
{{.Example}}{{end}}{{if .HasAvailableSubCommands}}
|
||||||
|
|
||||||
|
Available Commands:{{range .Commands}}{{if (or .IsAvailableCommand (eq .Name "help"))}}
|
||||||
|
{{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableLocalFlags}}
|
||||||
|
|
||||||
|
Filter Flags:
|
||||||
|
{{.LocalNonPersistentFlags.FlagUsages | trimTrailingWhitespaces}}
|
||||||
|
%s
|
||||||
|
Global Flags:
|
||||||
|
{{.PersistentFlags.FlagUsages | trimTrailingWhitespaces}}
|
||||||
|
|
||||||
|
{{end}}`
|
||||||
|
)
|
||||||
|
|
||||||
|
// validator defines the function for providing validation on a flag
|
||||||
|
type validator = func(val interface{}) error
|
||||||
|
|
||||||
|
// processor defines the function for providing mutating processing on a flag
|
||||||
|
type processor = func(val interface{}) error
|
||||||
|
|
||||||
|
// CommandLineInterface is a type to group CLI funcs and state
|
||||||
|
type CommandLineInterface struct {
|
||||||
|
Command *cobra.Command
|
||||||
|
Flags map[string]interface{}
|
||||||
|
nilDefaults map[string]bool
|
||||||
|
rangeFlags map[string]bool
|
||||||
|
validators map[string]validator
|
||||||
|
processors map[string]processor
|
||||||
|
suiteFlags *pflag.FlagSet
|
||||||
|
}
|
||||||
|
|
||||||
|
// Float64Me takes an interface and returns a pointer to a float64 value
|
||||||
|
// If the underlying interface kind is not float64 or *float64 then nil is returned
|
||||||
|
func (*CommandLineInterface) Float64Me(i interface{}) *float64 {
|
||||||
|
if i == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch v := i.(type) {
|
||||||
|
case *float64:
|
||||||
|
return v
|
||||||
|
case float64:
|
||||||
|
return &v
|
||||||
|
default:
|
||||||
|
log.Printf("%s cannot be converted to a float64", i)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IntMe takes an interface and returns a pointer to an int value
|
||||||
|
// If the underlying interface kind is not int, *int, int32, or *int32 then nil is returned
|
||||||
|
func (*CommandLineInterface) IntMe(i interface{}) *int {
|
||||||
|
if i == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch v := i.(type) {
|
||||||
|
case *int:
|
||||||
|
return v
|
||||||
|
case int:
|
||||||
|
return &v
|
||||||
|
case *int32:
|
||||||
|
val := int(*v)
|
||||||
|
return &val
|
||||||
|
case int32:
|
||||||
|
val := int(v)
|
||||||
|
return &val
|
||||||
|
default:
|
||||||
|
log.Printf("%s cannot be converted to an int", i)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IntRangeMe takes an interface and returns a pointer to an IntRangeFilter value
|
||||||
|
// If the underlying interface kind is not IntRangeFilter or *IntRangeFilter then nil is returned
|
||||||
|
func (*CommandLineInterface) IntRangeMe(i interface{}) *selector.IntRangeFilter {
|
||||||
|
if i == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch v := i.(type) {
|
||||||
|
case *selector.IntRangeFilter:
|
||||||
|
return v
|
||||||
|
case selector.IntRangeFilter:
|
||||||
|
return &v
|
||||||
|
default:
|
||||||
|
log.Printf("%s cannot be converted to an IntRange", i)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByteQuantityRangeMe takes an interface and returns a pointer to a ByteQuantityRangeFilter value
|
||||||
|
// If the underlying interface kind is not ByteQuantityRangeFilter or *ByteQuantityRangeFilter then nil is returned
|
||||||
|
func (*CommandLineInterface) ByteQuantityRangeMe(i interface{}) *selector.ByteQuantityRangeFilter {
|
||||||
|
if i == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch v := i.(type) {
|
||||||
|
case *selector.ByteQuantityRangeFilter:
|
||||||
|
return v
|
||||||
|
case selector.ByteQuantityRangeFilter:
|
||||||
|
return &v
|
||||||
|
default:
|
||||||
|
log.Printf("%s cannot be converted to a ByteQuantityRange", i)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringMe takes an interface and returns a pointer to a string value
|
||||||
|
// If the underlying interface kind is not string or *string then nil is returned
|
||||||
|
func (*CommandLineInterface) StringMe(i interface{}) *string {
|
||||||
|
if i == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch v := i.(type) {
|
||||||
|
case *string:
|
||||||
|
return v
|
||||||
|
case string:
|
||||||
|
return &v
|
||||||
|
default:
|
||||||
|
log.Printf("%s cannot be converted to a string", i)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// BoolMe takes an interface and returns a pointer to a bool value
|
||||||
|
// If the underlying interface kind is not bool or *bool then nil is returned
|
||||||
|
func (*CommandLineInterface) BoolMe(i interface{}) *bool {
|
||||||
|
if i == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch v := i.(type) {
|
||||||
|
case *bool:
|
||||||
|
return v
|
||||||
|
case bool:
|
||||||
|
return &v
|
||||||
|
default:
|
||||||
|
log.Printf("%s cannot be converted to a bool", i)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringSliceMe takes an interface and returns a pointer to a string slice
|
||||||
|
// If the underlying interface kind is not []string or *[]string then nil is returned
|
||||||
|
func (*CommandLineInterface) StringSliceMe(i interface{}) *[]string {
|
||||||
|
if i == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch v := i.(type) {
|
||||||
|
case *[]string:
|
||||||
|
return v
|
||||||
|
case []string:
|
||||||
|
return &v
|
||||||
|
default:
|
||||||
|
log.Printf("%s cannot be converted to a string list", i)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// RegexMe takes an interface and returns a pointer to a regex
|
||||||
|
// If the underlying interface kind is not regexp.Regexp or *regexp.Regexp then nil is returned
|
||||||
|
func (*CommandLineInterface) RegexMe(i interface{}) *regexp.Regexp {
|
||||||
|
if i == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch v := i.(type) {
|
||||||
|
case *regexp.Regexp:
|
||||||
|
return v
|
||||||
|
case regexp.Regexp:
|
||||||
|
return &v
|
||||||
|
default:
|
||||||
|
log.Printf("%s cannot be converted to a regexp", i)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByteQuantityMe takes an interface and returns a pointer to a ByteQuantity
|
||||||
|
// If the underlying interface kind is not bytequantity.ByteQuantity or *bytequantity.ByteQuantity then nil is returned
|
||||||
|
func (*CommandLineInterface) ByteQuantityMe(i interface{}) *bytequantity.ByteQuantity {
|
||||||
|
if i == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch v := i.(type) {
|
||||||
|
case *bytequantity.ByteQuantity:
|
||||||
|
return v
|
||||||
|
case bytequantity.ByteQuantity:
|
||||||
|
return &v
|
||||||
|
default:
|
||||||
|
log.Printf("%s cannot be converted to a byte quantity", i)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
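The CommandLineInterface stores every parsed flag value as an interface{} in its Flags map, and the *Me helpers above convert those values back into typed pointers, returning nil (and logging) when the underlying kind does not match. A minimal sketch of calling the exported helpers directly, assuming nothing beyond the methods shown above:

package main

import (
	"fmt"

	"github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli"
)

func main() {
	c := cli.CommandLineInterface{}

	// Values and pointers both convert; unsupported kinds return nil.
	fmt.Println(*c.IntMe(4))           // 4
	fmt.Println(*c.StringMe("m5"))     // m5
	fmt.Println(c.BoolMe(3.14) == nil) // true: not a bool, so the helper logs and returns nil
}
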
23 vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/BUILD.bazel generated vendored Normal file
@@ -0,0 +1,23 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = [
        "aggregates.go",
        "comparators.go",
        "selector.go",
        "types.go",
    ],
    importmap = "k8s.io/kops/vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector",
    importpath = "github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector",
    visibility = ["//visibility:public"],
    deps = [
        "//vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity:go_default_library",
        "//vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/outputs:go_default_library",
        "//vendor/github.com/aws/aws-sdk-go/aws:go_default_library",
        "//vendor/github.com/aws/aws-sdk-go/aws/request:go_default_library",
        "//vendor/github.com/aws/aws-sdk-go/aws/session:go_default_library",
        "//vendor/github.com/aws/aws-sdk-go/service/ec2:go_default_library",
        "//vendor/github.com/aws/aws-sdk-go/service/ec2/ec2iface:go_default_library",
    ],
)
109 vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/aggregates.go generated vendored Normal file
@@ -0,0 +1,109 @@
|
||||||
|
package selector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
|
||||||
|
"github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity"
|
||||||
|
"github.com/aws/aws-sdk-go/aws"
|
||||||
|
"github.com/aws/aws-sdk-go/service/ec2"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// AggregateLowPercentile is the default lower percentile for resource ranges on similar instance type comparisons
|
||||||
|
AggregateLowPercentile = 0.9
|
||||||
|
// AggregateHighPercentile is the default upper percentile for resource ranges on similar instance type comparisons
|
||||||
|
AggregateHighPercentile = 1.2
|
||||||
|
)
|
||||||
|
|
||||||
|
// FiltersTransform can be implemented to provide custom transforms
|
||||||
|
type FiltersTransform interface {
|
||||||
|
Transform(Filters) (Filters, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TransformFn is the func type definition for a FiltersTransform
|
||||||
|
type TransformFn func(Filters) (Filters, error)
|
||||||
|
|
||||||
|
// Transform implements FiltersTransform interface on TransformFn
|
||||||
|
// This allows any TransformFn to be passed into funcs accepting FiltersTransform interface
|
||||||
|
func (fn TransformFn) Transform(filters Filters) (Filters, error) {
|
||||||
|
return fn(filters)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TransformBaseInstanceType transforms lower level filters based on the instanceTypeBase specs
|
||||||
|
func (itf Selector) TransformBaseInstanceType(filters Filters) (Filters, error) {
|
||||||
|
if filters.InstanceTypeBase == nil {
|
||||||
|
return filters, nil
|
||||||
|
}
|
||||||
|
instanceTypesOutput, err := itf.EC2.DescribeInstanceTypes(&ec2.DescribeInstanceTypesInput{
|
||||||
|
InstanceTypes: []*string{filters.InstanceTypeBase},
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return filters, err
|
||||||
|
}
|
||||||
|
if len(instanceTypesOutput.InstanceTypes) == 0 {
|
||||||
|
return filters, fmt.Errorf("error instance type %s is not a valid instance type", *filters.InstanceTypeBase)
|
||||||
|
}
|
||||||
|
instanceTypeInfo := instanceTypesOutput.InstanceTypes[0]
|
||||||
|
if filters.BareMetal == nil {
|
||||||
|
filters.BareMetal = instanceTypeInfo.BareMetal
|
||||||
|
}
|
||||||
|
if filters.CPUArchitecture == nil {
|
||||||
|
filters.CPUArchitecture = instanceTypeInfo.ProcessorInfo.SupportedArchitectures[0]
|
||||||
|
}
|
||||||
|
if filters.Fpga == nil {
|
||||||
|
isFpgaSupported := instanceTypeInfo.FpgaInfo != nil
|
||||||
|
filters.Fpga = &isFpgaSupported
|
||||||
|
}
|
||||||
|
if filters.GpusRange == nil {
|
||||||
|
gpuCount := 0
|
||||||
|
if instanceTypeInfo.GpuInfo != nil {
|
||||||
|
gpuCount = int(*getTotalGpusCount(instanceTypeInfo.GpuInfo))
|
||||||
|
}
|
||||||
|
filters.GpusRange = &IntRangeFilter{LowerBound: gpuCount, UpperBound: gpuCount}
|
||||||
|
}
|
||||||
|
if filters.MemoryRange == nil {
|
||||||
|
lowerBound := bytequantity.ByteQuantity{Quantity: uint64(float64(*instanceTypeInfo.MemoryInfo.SizeInMiB) * AggregateLowPercentile)}
|
||||||
|
upperBound := bytequantity.ByteQuantity{Quantity: uint64(float64(*instanceTypeInfo.MemoryInfo.SizeInMiB) * AggregateHighPercentile)}
|
||||||
|
filters.MemoryRange = &ByteQuantityRangeFilter{LowerBound: lowerBound, UpperBound: upperBound}
|
||||||
|
}
|
||||||
|
if filters.VCpusRange == nil {
|
||||||
|
lowerBound := int(float64(*instanceTypeInfo.VCpuInfo.DefaultVCpus) * AggregateLowPercentile)
|
||||||
|
upperBound := int(float64(*instanceTypeInfo.VCpuInfo.DefaultVCpus) * AggregateHighPercentile)
|
||||||
|
filters.VCpusRange = &IntRangeFilter{LowerBound: lowerBound, UpperBound: upperBound}
|
||||||
|
}
|
||||||
|
filters.InstanceTypeBase = nil
|
||||||
|
|
||||||
|
return filters, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// TransformFlexible transforms lower level filters based on a set of opinions
|
||||||
|
func (itf Selector) TransformFlexible(filters Filters) (Filters, error) {
|
||||||
|
if filters.Flexible == nil {
|
||||||
|
return filters, nil
|
||||||
|
}
|
||||||
|
if filters.CPUArchitecture == nil {
|
||||||
|
filters.CPUArchitecture = aws.String("x86_64")
|
||||||
|
}
|
||||||
|
if filters.BareMetal == nil {
|
||||||
|
filters.BareMetal = aws.Bool(false)
|
||||||
|
}
|
||||||
|
if filters.Fpga == nil {
|
||||||
|
filters.Fpga = aws.Bool(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
if filters.AllowList == nil {
|
||||||
|
baseAllowedInstanceTypes, err := regexp.Compile("^[cmr][3-9][ag]?\\..*$|^a[1-9]\\..*$|^t[2-9]\\..*$")
|
||||||
|
if err != nil {
|
||||||
|
return filters, err
|
||||||
|
}
|
||||||
|
filters.AllowList = baseAllowedInstanceTypes
|
||||||
|
}
|
||||||
|
|
||||||
|
if filters.VCpusRange == nil && filters.MemoryRange == nil {
|
||||||
|
defaultVcpus := 4
|
||||||
|
filters.VCpusRange = &IntRangeFilter{LowerBound: defaultVcpus, UpperBound: defaultVcpus}
|
||||||
|
}
|
||||||
|
|
||||||
|
return filters, nil
|
||||||
|
}
|
||||||
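TransformFn is a function adapter: any func(Filters) (Filters, error) satisfies the FiltersTransform interface, which is how TransformBaseInstanceType and TransformFlexible are chained later in selector.go. A hedged sketch of a custom transform, using only the exported Filters fields and IntRangeFilter type that appear in this vendored code (the 2-4 vCPU default is purely illustrative):

package main

import (
	"fmt"

	"github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector"
)

// defaultVCpus is a custom TransformFn: if no vCPU range was requested,
// it pins the range to 2-4 vCPUs. The defaults are illustrative only.
func defaultVCpus(filters selector.Filters) (selector.Filters, error) {
	if filters.VCpusRange == nil {
		filters.VCpusRange = &selector.IntRangeFilter{LowerBound: 2, UpperBound: 4}
	}
	return filters, nil
}

func main() {
	var transform selector.FiltersTransform = selector.TransformFn(defaultVCpus)
	filters, err := transform.Transform(selector.Filters{})
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("vcpus: %d-%d\n", filters.VCpusRange.LowerBound, filters.VCpusRange.UpperBound)
}
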
175 vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/comparators.go generated vendored Normal file
@@ -0,0 +1,175 @@
|
||||||
|
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License"). You may
|
||||||
|
// not use this file except in compliance with the License. A copy of the
|
||||||
|
// License is located at
|
||||||
|
//
|
||||||
|
// http://aws.amazon.com/apache2.0/
|
||||||
|
//
|
||||||
|
// or in the "license" file accompanying this file. This file is distributed
|
||||||
|
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||||
|
// express or implied. See the License for the specific language governing
|
||||||
|
// permissions and limitations under the License.
|
||||||
|
|
||||||
|
package selector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
"math"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/aws/aws-sdk-go/aws"
|
||||||
|
"github.com/aws/aws-sdk-go/service/ec2"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
supported = "supported"
|
||||||
|
required = "required"
|
||||||
|
)
|
||||||
|
|
||||||
|
func isSupportedFromString(instanceTypeValue *string, target *string) bool {
|
||||||
|
if target == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if instanceTypeValue == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return *instanceTypeValue == *target
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSupportedFromStrings(instanceTypeValues []*string, target *string) bool {
|
||||||
|
if target == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return contains(instanceTypeValues, *target)
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSupportedWithRangeInt(instanceTypeValue *int, target *IntRangeFilter) bool {
|
||||||
|
var instanceTypeValueInt64 *int64
|
||||||
|
if instanceTypeValue != nil {
|
||||||
|
nonPtr := int64(*instanceTypeValue)
|
||||||
|
instanceTypeValueInt64 = &nonPtr
|
||||||
|
}
|
||||||
|
return isSupportedWithRangeInt64(instanceTypeValueInt64, target)
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSupportedWithFloat64(instanceTypeValue *float64, target *float64) bool {
|
||||||
|
if target == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if instanceTypeValue == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
// compare values after flooring them to two decimal places
|
||||||
|
return math.Floor(*instanceTypeValue*100)/100 == math.Floor(*target*100)/100
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSupportedWithRangeInt64(instanceTypeValue *int64, target *IntRangeFilter) bool {
|
||||||
|
if target == nil {
|
||||||
|
return true
|
||||||
|
} else if instanceTypeValue == nil && target.LowerBound == 0 && target.UpperBound == 0 {
|
||||||
|
return true
|
||||||
|
} else if instanceTypeValue == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return int(*instanceTypeValue) >= target.LowerBound && int(*instanceTypeValue) <= target.UpperBound
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSupportedWithRangeUint64(instanceTypeValue *int64, target *Uint64RangeFilter) bool {
|
||||||
|
if target == nil {
|
||||||
|
return true
|
||||||
|
} else if instanceTypeValue == nil && target.LowerBound == 0 && target.UpperBound == 0 {
|
||||||
|
return true
|
||||||
|
} else if instanceTypeValue == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if target.UpperBound > math.MaxInt64 {
|
||||||
|
target.UpperBound = math.MaxInt64
|
||||||
|
}
|
||||||
|
return uint64(*instanceTypeValue) >= target.LowerBound && uint64(*instanceTypeValue) <= target.UpperBound
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSupportedWithBool(instanceTypeValue *bool, target *bool) bool {
|
||||||
|
if target == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return *target == *instanceTypeValue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper functions for aggregating data parsed from AWS API calls
|
||||||
|
|
||||||
|
func getTotalGpusCount(gpusInfo *ec2.GpuInfo) *int64 {
|
||||||
|
if gpusInfo == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
total := aws.Int64(0)
|
||||||
|
for _, gpu := range gpusInfo.Gpus {
|
||||||
|
total = aws.Int64(*total + *gpu.Count)
|
||||||
|
}
|
||||||
|
return total
|
||||||
|
}
|
||||||
|
|
||||||
|
func getTotalGpuMemory(gpusInfo *ec2.GpuInfo) *int64 {
|
||||||
|
if gpusInfo == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return gpusInfo.TotalGpuMemoryInMiB
|
||||||
|
}
|
||||||
|
|
||||||
|
func getNetworkPerformance(networkPerformance *string) *int {
|
||||||
|
if networkPerformance == nil {
|
||||||
|
return aws.Int(-1)
|
||||||
|
}
|
||||||
|
re, err := regexp.Compile(`[0-9]+ Gigabit`)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Unable to compile regexp to parse network performance: %s\n", *networkPerformance)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
networkBandwidth := re.FindString(*networkPerformance)
|
||||||
|
if networkBandwidth == "" {
|
||||||
|
return aws.Int(-1)
|
||||||
|
}
|
||||||
|
bandwidthAndUnit := strings.Split(networkBandwidth, " ")
|
||||||
|
if len(bandwidthAndUnit) != 2 {
|
||||||
|
return aws.Int(-1)
|
||||||
|
}
|
||||||
|
bandwidthNumber, err := strconv.Atoi(bandwidthAndUnit[0])
|
||||||
|
if err != nil {
|
||||||
|
return aws.Int(-1)
|
||||||
|
}
|
||||||
|
return aws.Int(bandwidthNumber)
|
||||||
|
}
|
||||||
|
|
||||||
|
// supportSyntaxToBool takes an instance spec field that uses ["unsupported", "supported", or "required"]
|
||||||
|
// and transforms it to a *bool to use in filter execution
|
||||||
|
func supportSyntaxToBool(instanceTypeSupport *string) *bool {
|
||||||
|
if instanceTypeSupport == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if strings.ToLower(*instanceTypeSupport) == required || strings.ToLower(*instanceTypeSupport) == supported {
|
||||||
|
return aws.Bool(true)
|
||||||
|
}
|
||||||
|
return aws.Bool(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
func calculateVCpusToMemoryRatio(vcpusVal *int64, memoryVal *int64) *float64 {
|
||||||
|
if vcpusVal == nil || *vcpusVal == 0 || memoryVal == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// normalize vcpus to a mebivcpu value
|
||||||
|
result := math.Ceil(float64(*memoryVal) / float64(*vcpusVal*1024))
|
||||||
|
return &result
|
||||||
|
}
|
||||||
|
|
||||||
|
// Slice helper function
|
||||||
|
|
||||||
|
func contains(slice []*string, target string) bool {
|
||||||
|
for _, it := range slice {
|
||||||
|
if it != nil && *it == target {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
16 vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/outputs/BUILD.bazel generated vendored Normal file
@@ -0,0 +1,16 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = [
        "outputs.go",
        "types.go",
    ],
    importmap = "k8s.io/kops/vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/outputs",
    importpath = "github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/outputs",
    visibility = ["//visibility:public"],
    deps = [
        "//vendor/github.com/aws/aws-sdk-go/service/ec2:go_default_library",
        "//vendor/github.com/ghodss/yaml:go_default_library",
    ],
)
262 vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/outputs/outputs.go generated vendored Normal file
@@ -0,0 +1,262 @@
|
||||||
|
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License"). You may
|
||||||
|
// not use this file except in compliance with the License. A copy of the
|
||||||
|
// License is located at
|
||||||
|
//
|
||||||
|
// http://aws.amazon.com/apache2.0/
|
||||||
|
//
|
||||||
|
// or in the "license" file accompanying this file. This file is distributed
|
||||||
|
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||||
|
// express or implied. See the License for the specific language governing
|
||||||
|
// permissions and limitations under the License.
|
||||||
|
|
||||||
|
// Package outputs provides types for implementing instance type output functions as well as prebuilt output functions.
|
||||||
|
package outputs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"strings"
|
||||||
|
"text/tabwriter"
|
||||||
|
|
||||||
|
"github.com/aws/aws-sdk-go/service/ec2"
|
||||||
|
"github.com/ghodss/yaml"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SimpleInstanceTypeOutput is an OutputFn which outputs a slice of instance type names
|
||||||
|
func SimpleInstanceTypeOutput(instanceTypeInfoSlice []*ec2.InstanceTypeInfo) []string {
|
||||||
|
instanceTypeStrings := []string{}
|
||||||
|
for _, instanceTypeInfo := range instanceTypeInfoSlice {
|
||||||
|
instanceTypeStrings = append(instanceTypeStrings, *instanceTypeInfo.InstanceType)
|
||||||
|
}
|
||||||
|
return instanceTypeStrings
|
||||||
|
}
|
||||||
|
|
||||||
|
// VerboseInstanceTypeOutput is an OutputFn which outputs the full instance type details as indented JSON
|
||||||
|
func VerboseInstanceTypeOutput(instanceTypeInfoSlice []*ec2.InstanceTypeInfo) []string {
|
||||||
|
output, err := json.MarshalIndent(instanceTypeInfoSlice, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
log.Println("Unable to convert instance type info to JSON")
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
if string(output) == "[]" || string(output) == "null" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return []string{string(output)}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TerraformSpotMixedInstancesPolicyHCLOutput is an OutputFn which returns an ASG MixedInstancePolicy in Terraform HCL syntax
|
||||||
|
func TerraformSpotMixedInstancesPolicyHCLOutput(instanceTypeInfoSlice []*ec2.InstanceTypeInfo) []string {
|
||||||
|
instanceTypeOverrides := instanceTypeInfoToOverrides(instanceTypeInfoSlice)
|
||||||
|
overridesString := ""
|
||||||
|
for _, override := range instanceTypeOverrides {
|
||||||
|
overridesString = overridesString + fmt.Sprintf(`
|
||||||
|
override {
|
||||||
|
instance_type = "%s"
|
||||||
|
}
|
||||||
|
`, override.InstanceType)
|
||||||
|
}
|
||||||
|
asgResource := fmt.Sprintf(`resource "aws_autoscaling_group" "AutoScalingGroupMIG" {
|
||||||
|
vpc_zone_identifier = [
|
||||||
|
"REPLACE_WITH_SUBNET_ID"
|
||||||
|
]
|
||||||
|
|
||||||
|
name = "AutoScalingGroupMIG"
|
||||||
|
max_size = 0
|
||||||
|
min_size = 0
|
||||||
|
desired_capacity = 0
|
||||||
|
|
||||||
|
mixed_instances_policy {
|
||||||
|
instances_distribution {
|
||||||
|
on_demand_base_capacity = 0
|
||||||
|
on_demand_percentage_above_base_capacity = 0
|
||||||
|
spot_allocation_strategy = "capacity-optimized"
|
||||||
|
}
|
||||||
|
|
||||||
|
launch_template {
|
||||||
|
launch_template_specification {
|
||||||
|
launch_template_id = "REPLACE_WITH_LAUNCH_TEMPLATE_ID"
|
||||||
|
version = "$$Latest"
|
||||||
|
}
|
||||||
|
|
||||||
|
%s
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
provider "aws" {
|
||||||
|
region = "us-east-1"
|
||||||
|
}
|
||||||
|
`, overridesString)
|
||||||
|
|
||||||
|
return []string{asgResource}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CloudFormationSpotMixedInstancesPolicyYAMLOutput is an OutputFn which returns an ASG MixedInstancePolicy in CloudFormation YAML syntax
|
||||||
|
func CloudFormationSpotMixedInstancesPolicyYAMLOutput(instanceTypeInfoSlice []*ec2.InstanceTypeInfo) []string {
|
||||||
|
instanceTypeOverrides := instanceTypeInfoToOverrides(instanceTypeInfoSlice)
|
||||||
|
cfnMig := getCfnMIGResources(instanceTypeOverrides)
|
||||||
|
cfnMigYAML, err := yaml.Marshal(cfnMig)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Unable to create CloudFormation YAML: %v\n", err)
|
||||||
|
}
|
||||||
|
return []string{string(cfnMigYAML)}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CloudFormationSpotMixedInstancesPolicyJSONOutput is an OutputFn which returns an ASG MixedInstancesPolicy in CloudFormation JSON syntax
|
||||||
|
func CloudFormationSpotMixedInstancesPolicyJSONOutput(instanceTypeInfoSlice []*ec2.InstanceTypeInfo) []string {
|
||||||
|
instanceTypeOverrides := instanceTypeInfoToOverrides(instanceTypeInfoSlice)
|
||||||
|
cfnMig := getCfnMIGResources(instanceTypeOverrides)
|
||||||
|
cfnJSONMig, err := json.MarshalIndent(cfnMig, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Unable to create CloudFormation JSON: %v\n", err)
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
return []string{string(cfnJSONMig)}
|
||||||
|
}
|
||||||
|
|
||||||
|
func getCfnMIGResources(instanceTypeOverrides []InstanceTypeOverride) Resources {
|
||||||
|
resources := map[string]AutoScalingGroup{}
|
||||||
|
resources["AutoScalingGroupMIG"] = AutoScalingGroup{
|
||||||
|
Type: typeASG,
|
||||||
|
Properties: AutoScalingGroupProperties{
|
||||||
|
AutoScalingGroupName: "REPLACE_WITH_NAME",
|
||||||
|
VPCZoneIdentifier: []string{"replace-with-subnet-ids"},
|
||||||
|
MixedInstancesPolicy: MixedInstancesPolicy{
|
||||||
|
InstancesDistribution: InstancesDistribution{
|
||||||
|
OnDemandBaseCapacity: 0,
|
||||||
|
OnDemandPercentageAboveBaseCapacity: 0,
|
||||||
|
SpotAllocationStrategy: capacityOptimized,
|
||||||
|
},
|
||||||
|
LaunchTemplate: LaunchTemplate{
|
||||||
|
LaunchTemplateSpecification: LaunchTemplateSpecification{
|
||||||
|
LaunchTemplateID: "REPLACE_WITH_LAUNCH_TEMPLATE_ID",
|
||||||
|
Version: "REPLACE_WITH_VERSION",
|
||||||
|
},
|
||||||
|
Overrides: instanceTypeOverrides,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return Resources{Resources: resources}
|
||||||
|
}
|
||||||
|
|
||||||
|
func instanceTypeInfoToOverrides(instanceTypeInfoSlice []*ec2.InstanceTypeInfo) []InstanceTypeOverride {
|
||||||
|
instanceTypeOverrides := []InstanceTypeOverride{}
|
||||||
|
for _, instanceTypeInfo := range instanceTypeInfoSlice {
|
||||||
|
instanceTypeOverrides = append(instanceTypeOverrides, InstanceTypeOverride{InstanceType: *instanceTypeInfo.InstanceType})
|
||||||
|
}
|
||||||
|
return instanceTypeOverrides
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableOutputShort is an OutputFn which returns a CLI table for easy reading
|
||||||
|
func TableOutputShort(instanceTypeInfoSlice []*ec2.InstanceTypeInfo) []string {
|
||||||
|
if instanceTypeInfoSlice == nil || len(instanceTypeInfoSlice) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
w := new(tabwriter.Writer)
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
w.Init(buf, 8, 8, 8, ' ', 0)
|
||||||
|
defer w.Flush()
|
||||||
|
|
||||||
|
headers := []interface{}{
|
||||||
|
"Instance Type",
|
||||||
|
"VCPUs",
|
||||||
|
"Mem (GiB)",
|
||||||
|
}
|
||||||
|
separators := []interface{}{}
|
||||||
|
|
||||||
|
headerFormat := ""
|
||||||
|
for _, header := range headers {
|
||||||
|
headerFormat = headerFormat + "%s\t"
|
||||||
|
separators = append(separators, strings.Repeat("-", len(header.(string))))
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, headerFormat, headers...)
|
||||||
|
fmt.Fprintf(w, "\n"+headerFormat, separators...)
|
||||||
|
|
||||||
|
for _, instanceTypeInfo := range instanceTypeInfoSlice {
|
||||||
|
fmt.Fprintf(w, "\n%s\t%d\t%.3f\t",
|
||||||
|
*instanceTypeInfo.InstanceType,
|
||||||
|
*instanceTypeInfo.VCpuInfo.DefaultVCpus,
|
||||||
|
float64(*instanceTypeInfo.MemoryInfo.SizeInMiB)/1024.0,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
w.Flush()
|
||||||
|
return []string{buf.String()}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableOutputWide is an OutputFn which returns a detailed CLI table for easy reading
|
||||||
|
func TableOutputWide(instanceTypeInfoSlice []*ec2.InstanceTypeInfo) []string {
|
||||||
|
if instanceTypeInfoSlice == nil || len(instanceTypeInfoSlice) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
w := new(tabwriter.Writer)
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
none := "none"
|
||||||
|
w.Init(buf, 8, 8, 2, ' ', 0)
|
||||||
|
defer w.Flush()
|
||||||
|
|
||||||
|
headers := []interface{}{
|
||||||
|
"Instance Type",
|
||||||
|
"VCPUs",
|
||||||
|
"Mem (GiB)",
|
||||||
|
"Hypervisor",
|
||||||
|
"Current Gen",
|
||||||
|
"Hibernation Support",
|
||||||
|
"CPU Arch",
|
||||||
|
"Network Performance",
|
||||||
|
"ENIs",
|
||||||
|
"GPUs",
|
||||||
|
"GPU Mem (GiB)",
|
||||||
|
"GPU Info",
|
||||||
|
}
|
||||||
|
separators := []interface{}{}
|
||||||
|
|
||||||
|
headerFormat := ""
|
||||||
|
for _, header := range headers {
|
||||||
|
headerFormat = headerFormat + "%s\t"
|
||||||
|
separators = append(separators, strings.Repeat("-", len(header.(string))))
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, headerFormat, headers...)
|
||||||
|
fmt.Fprintf(w, "\n"+headerFormat, separators...)
|
||||||
|
|
||||||
|
for _, instanceTypeInfo := range instanceTypeInfoSlice {
|
||||||
|
hypervisor := instanceTypeInfo.Hypervisor
|
||||||
|
if hypervisor == nil {
|
||||||
|
hypervisor = &none
|
||||||
|
}
|
||||||
|
cpuArchitectures := []string{}
|
||||||
|
for _, cpuArch := range instanceTypeInfo.ProcessorInfo.SupportedArchitectures {
|
||||||
|
cpuArchitectures = append(cpuArchitectures, *cpuArch)
|
||||||
|
}
|
||||||
|
gpus := int64(0)
|
||||||
|
gpuMemory := int64(0)
|
||||||
|
gpuType := []string{}
|
||||||
|
if instanceTypeInfo.GpuInfo != nil {
|
||||||
|
gpuMemory = *instanceTypeInfo.GpuInfo.TotalGpuMemoryInMiB
|
||||||
|
for _, gpuInfo := range instanceTypeInfo.GpuInfo.Gpus {
|
||||||
|
gpus = gpus + *gpuInfo.Count
|
||||||
|
gpuType = append(gpuType, *gpuInfo.Manufacturer+" "+*gpuInfo.Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(w, "\n%s\t%d\t%.3f\t%s\t%t\t%t\t%s\t%s\t%d\t%d\t%.2f\t%s\t",
|
||||||
|
*instanceTypeInfo.InstanceType,
|
||||||
|
*instanceTypeInfo.VCpuInfo.DefaultVCpus,
|
||||||
|
float64(*instanceTypeInfo.MemoryInfo.SizeInMiB)/1024.0,
|
||||||
|
*hypervisor,
|
||||||
|
*instanceTypeInfo.CurrentGeneration,
|
||||||
|
*instanceTypeInfo.HibernationSupported,
|
||||||
|
strings.Join(cpuArchitectures, ", "),
|
||||||
|
*instanceTypeInfo.NetworkInfo.NetworkPerformance,
|
||||||
|
*instanceTypeInfo.NetworkInfo.MaximumNetworkInterfaces,
|
||||||
|
gpus,
|
||||||
|
float64(gpuMemory)/1024.0,
|
||||||
|
strings.Join(gpuType, ", "),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
w.Flush()
|
||||||
|
return []string{buf.String()}
|
||||||
|
}
|
||||||
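The output functions above plug into the selector through the InstanceTypesOutputFn adapter defined in selector.go below. The following end-to-end sketch is an illustrative example, not part of the vendored code: it assumes AWS credentials and a region are available from the environment, and the 4-vCPU filter and CloudFormation JSON output are chosen purely for demonstration.

package main

import (
	"fmt"
	"log"

	"github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector"
	"github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/outputs"
	"github.com/aws/aws-sdk-go/aws/session"
)

func main() {
	// Assumes region and credentials come from the environment/shared config.
	sess := session.Must(session.NewSessionWithOptions(session.Options{
		SharedConfigState: session.SharedConfigEnable,
	}))
	sel := selector.New(sess)

	// Ask for 4-vCPU instance types and render them as a CloudFormation
	// MixedInstancesPolicy JSON snippet using one of the output funcs above.
	filters := selector.Filters{
		VCpusRange: &selector.IntRangeFilter{LowerBound: 4, UpperBound: 4},
	}
	outputFn := selector.InstanceTypesOutputFn(outputs.CloudFormationSpotMixedInstancesPolicyJSONOutput)
	result, err := sel.FilterWithOutput(filters, outputFn)
	if err != nil {
		log.Fatal(err)
	}
	for _, line := range result {
		fmt.Println(line)
	}
}
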
75 vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/outputs/types.go generated vendored Normal file
@@ -0,0 +1,75 @@
|
||||||
|
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License"). You may
|
||||||
|
// not use this file except in compliance with the License. A copy of the
|
||||||
|
// License is located at
|
||||||
|
//
|
||||||
|
// http://aws.amazon.com/apache2.0/
|
||||||
|
//
|
||||||
|
// or in the "license" file accompanying this file. This file is distributed
|
||||||
|
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||||
|
// express or implied. See the License for the specific language governing
|
||||||
|
// permissions and limitations under the License.
|
||||||
|
|
||||||
|
package outputs
|
||||||
|
|
||||||
|
const (
|
||||||
|
capacityOptimized = "capacity-optimized"
|
||||||
|
typeASG = "AWS::AutoScaling::AutoScalingGroup"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Resources is a struct to represent json for a cloudformation Resources definition block.
|
||||||
|
type Resources struct {
|
||||||
|
Resources map[string]AutoScalingGroup `json:"Resources"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// AutoScalingGroup is a struct to represent json for a cloudformation ASG definition
|
||||||
|
type AutoScalingGroup struct {
|
||||||
|
Type string `json:"Type"`
|
||||||
|
Properties AutoScalingGroupProperties `json:"Properties"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// AutoScalingGroupProperties is a struct to represent json for a cloudformation ASG Properties definition
|
||||||
|
type AutoScalingGroupProperties struct {
|
||||||
|
AutoScalingGroupName string `json:"AutoScalingGroupName"`
|
||||||
|
MinSize int `json:"MinSize,string"`
|
||||||
|
MaxSize int `json:"MaxSize,string"`
|
||||||
|
DesiredCapacity int `json:"DesiredCapacity,string"`
|
||||||
|
VPCZoneIdentifier []string `json:"VPCZoneIdentifier"`
|
||||||
|
MixedInstancesPolicy MixedInstancesPolicy `json:"MixedInstancesPolicy"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// MixedInstancesPolicy is a struct to represent json for a cloudformation ASG MixedInstancesPolicy definition
|
||||||
|
type MixedInstancesPolicy struct {
|
||||||
|
InstancesDistribution InstancesDistribution `json:"InstancesDistribution"`
|
||||||
|
LaunchTemplate LaunchTemplate `json:"LaunchTemplate"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// InstancesDistribution is a struct to represent json for a cloudformation ASG MixedInstancesPolicy InstancesDistribution definition
|
||||||
|
type InstancesDistribution struct {
|
||||||
|
OnDemandAllocationStrategy string `json:"OnDemandAllocationStrategy,omitempty"`
|
||||||
|
OnDemandBaseCapacity int `json:"OnDemandBaseCapacity"`
|
||||||
|
OnDemandPercentageAboveBaseCapacity int `json:"OnDemandPercentageAboveBaseCapacity"`
|
||||||
|
SpotAllocationStrategy string `json:"SpotAllocationStrategy,omitempty"`
|
||||||
|
SpotInstancePools int `json:"SpotInstancePools,omitempty"`
|
||||||
|
SpotMaxPrice string `json:"SpotMaxPrice,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// LaunchTemplate is a struct to represent json for a cloudformation LaunchTemplate definition
|
||||||
|
type LaunchTemplate struct {
|
||||||
|
LaunchTemplateSpecification LaunchTemplateSpecification `json:"LaunchTemplateSpecification"`
|
||||||
|
Overrides []InstanceTypeOverride `json:"Overrides"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// LaunchTemplateSpecification is a struct to represent json for a cloudformation LaunchTemplate LaunchTemplateSpecification definition
|
||||||
|
type LaunchTemplateSpecification struct {
|
||||||
|
LaunchTemplateID string `json:"LaunchTemplateId,omitempty"`
|
||||||
|
LaunchTemplateName string `json:"LaunchTemplateName,omitempty"`
|
||||||
|
Version string `json:"Version"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// InstanceTypeOverride is a struct to represent json for a cloudformation LaunchTemplate LaunchTemplateSpecification InstanceTypeOverrides definition
|
||||||
|
type InstanceTypeOverride struct {
|
||||||
|
InstanceType string `json:"InstanceType"`
|
||||||
|
WeightedCapacity int `json:"WeightedCapacity,omitempty"`
|
||||||
|
}
|
||||||
417 vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/selector.go generated vendored Normal file
@@ -0,0 +1,417 @@
|
||||||
|
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License"). You may
|
||||||
|
// not use this file except in compliance with the License. A copy of the
|
||||||
|
// License is located at
|
||||||
|
//
|
||||||
|
// http://aws.amazon.com/apache2.0/
|
||||||
|
//
|
||||||
|
// or in the "license" file accompanying this file. This file is distributed
|
||||||
|
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||||
|
// express or implied. See the License for the specific language governing
|
||||||
|
// permissions and limitations under the License.
|
||||||
|
|
||||||
|
// Package selector provides filtering logic for Amazon EC2 Instance Types based on declarative resource specifications.
|
||||||
|
package selector
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"regexp"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/outputs"
|
||||||
|
"github.com/aws/aws-sdk-go/aws"
|
||||||
|
"github.com/aws/aws-sdk-go/aws/request"
|
||||||
|
"github.com/aws/aws-sdk-go/aws/session"
|
||||||
|
"github.com/aws/aws-sdk-go/service/ec2"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
// versionID is overridden at compilation with the version based on the git tag
|
||||||
|
versionID = "dev"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
locationFilterKey = "location"
|
||||||
|
zoneIDLocationType = "availability-zone-id"
|
||||||
|
zoneNameLocationType = "availability-zone"
|
||||||
|
regionNameLocationType = "region"
|
||||||
|
sdkName = "instance-selector"
|
||||||
|
|
||||||
|
// Filter Keys
|
||||||
|
|
||||||
|
cpuArchitecture = "cpuArchitecture"
|
||||||
|
usageClass = "usageClass"
|
||||||
|
rootDeviceType = "rootDeviceType"
|
||||||
|
hibernationSupported = "hibernationSupported"
|
||||||
|
vcpusRange = "vcpusRange"
|
||||||
|
memoryRange = "memoryRange"
|
||||||
|
gpuMemoryRange = "gpuMemoryRange"
|
||||||
|
gpusRange = "gpusRange"
|
||||||
|
placementGroupStrategy = "placementGroupStrategy"
|
||||||
|
hypervisor = "hypervisor"
|
||||||
|
baremetal = "baremetal"
|
||||||
|
burstable = "burstable"
|
||||||
|
fpga = "fpga"
|
||||||
|
enaSupport = "enaSupport"
|
||||||
|
vcpusToMemoryRatio = "vcpusToMemoryRatio"
|
||||||
|
currentGeneration = "currentGeneration"
|
||||||
|
networkInterfaces = "networkInterfaces"
|
||||||
|
networkPerformance = "networkPerformance"
|
||||||
|
allowList = "allowList"
|
||||||
|
denyList = "denyList"
|
||||||
|
|
||||||
|
cpuArchitectureAMD64 = "amd64"
|
||||||
|
cpuArchitectureX8664 = "x86_64"
|
||||||
|
)
|
||||||
|
|
||||||
|
// New creates an instance of Selector provided an aws session
|
||||||
|
func New(sess *session.Session) *Selector {
|
||||||
|
userAgentTag := fmt.Sprintf("%s-v%s", sdkName, versionID)
|
||||||
|
userAgentHandler := request.MakeAddToUserAgentFreeFormHandler(userAgentTag)
|
||||||
|
sess.Handlers.Build.PushBack(userAgentHandler)
|
||||||
|
return &Selector{
|
||||||
|
EC2: ec2.New(sess),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter accepts a Filters struct which is used to select the available instance types
|
||||||
|
// matching the criteria within Filters and returns a simple list of instance type strings
|
||||||
|
func (itf Selector) Filter(filters Filters) ([]string, error) {
|
||||||
|
outputFn := InstanceTypesOutputFn(outputs.SimpleInstanceTypeOutput)
|
||||||
|
return itf.FilterWithOutput(filters, outputFn)
|
||||||
|
}
|
||||||
|
|
||||||
|
// FilterVerbose accepts a Filters struct which is used to select the available instance types
|
||||||
|
// matching the criteria within Filters and returns a list instanceTypeInfo
|
||||||
|
func (itf Selector) FilterVerbose(filters Filters) ([]*ec2.InstanceTypeInfo, error) {
|
||||||
|
instanceTypeInfoSlice, err := itf.rawFilter(filters)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
instanceTypeInfoSlice = itf.truncateResults(filters.MaxResults, instanceTypeInfoSlice)
|
||||||
|
return instanceTypeInfoSlice, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FilterWithOutput accepts a Filters struct which is used to select the available instance types
|
||||||
|
// matching the criteria within Filters and returns a list of strings based on the custom outputFn
|
||||||
|
func (itf Selector) FilterWithOutput(filters Filters, outputFn InstanceTypesOutput) ([]string, error) {
|
||||||
|
instanceTypeInfoSlice, err := itf.rawFilter(filters)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
instanceTypeInfoSlice = itf.truncateResults(filters.MaxResults, instanceTypeInfoSlice)
|
||||||
|
output := outputFn.Output(instanceTypeInfoSlice)
|
||||||
|
return output, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (itf Selector) truncateResults(maxResults *int, instanceTypeInfoSlice []*ec2.InstanceTypeInfo) []*ec2.InstanceTypeInfo {
|
||||||
|
if maxResults == nil {
|
||||||
|
return instanceTypeInfoSlice
|
||||||
|
}
|
||||||
|
upperIndex := *maxResults
|
||||||
|
if *maxResults > len(instanceTypeInfoSlice) {
|
||||||
|
upperIndex = len(instanceTypeInfoSlice)
|
||||||
|
}
|
||||||
|
return instanceTypeInfoSlice[0:upperIndex]
|
||||||
|
}
|
||||||
|
|
||||||
|
// AggregateFilterTransform takes higher level filters which are used to affect multiple raw filters in an opinionated way.
|
||||||
|
func (itf Selector) AggregateFilterTransform(filters Filters) (Filters, error) {
|
||||||
|
transforms := []FiltersTransform{
|
||||||
|
TransformFn(itf.TransformBaseInstanceType),
|
||||||
|
TransformFn(itf.TransformFlexible),
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
for _, transform := range transforms {
|
||||||
|
filters, err = transform.Transform(filters)
|
||||||
|
if err != nil {
|
||||||
|
return filters, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return filters, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// rawFilter accepts a Filters struct which is used to select the available instance types
|
||||||
|
// matching the criteria within Filters and returns the detailed specs of matching instance types
|
||||||
|
func (itf Selector) rawFilter(filters Filters) ([]*ec2.InstanceTypeInfo, error) {
|
||||||
|
filters, err := itf.AggregateFilterTransform(filters)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
var locations []string
|
||||||
|
|
||||||
|
if filters.CPUArchitecture != nil && *filters.CPUArchitecture == cpuArchitectureAMD64 {
|
||||||
|
*filters.CPUArchitecture = cpuArchitectureX8664
|
||||||
|
}
|
||||||
|
|
||||||
|
if filters.AvailabilityZones != nil {
|
||||||
|
locations = *filters.AvailabilityZones
|
||||||
|
} else if filters.Region != nil {
|
||||||
|
locations = []string{*filters.Region}
|
||||||
|
}
|
||||||
|
locationInstanceOfferings, err := itf.RetrieveInstanceTypesSupportedInLocations(locations)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
instanceTypesInput := &ec2.DescribeInstanceTypesInput{}
|
||||||
|
instanceTypeCandidates := map[string]*ec2.InstanceTypeInfo{}
|
||||||
|
// innerErr will hold any error while processing DescribeInstanceTypes pages
|
||||||
|
var innerErr error
|
||||||
|
|
||||||
|
err = itf.EC2.DescribeInstanceTypesPages(instanceTypesInput, func(page *ec2.DescribeInstanceTypesOutput, lastPage bool) bool {
|
||||||
|
for _, instanceTypeInfo := range page.InstanceTypes {
|
||||||
|
instanceTypeName := *instanceTypeInfo.InstanceType
|
||||||
|
instanceTypeCandidates[instanceTypeName] = instanceTypeInfo
|
||||||
|
isFpga := instanceTypeInfo.FpgaInfo != nil
|
||||||
|
|
||||||
|
// filterToInstanceSpecMappingPairs is a map of filter name [key] to filter pair [value].
|
||||||
|
// A filter pair includes user input filter value and instance spec value retrieved from DescribeInstanceTypes
|
||||||
|
filterToInstanceSpecMappingPairs := map[string]filterPair{
|
||||||
|
cpuArchitecture: {filters.CPUArchitecture, instanceTypeInfo.ProcessorInfo.SupportedArchitectures},
|
||||||
|
usageClass: {filters.UsageClass, instanceTypeInfo.SupportedUsageClasses},
|
||||||
|
rootDeviceType: {filters.RootDeviceType, instanceTypeInfo.SupportedRootDeviceTypes},
|
||||||
|
hibernationSupported: {filters.HibernationSupported, instanceTypeInfo.HibernationSupported},
|
||||||
|
vcpusRange: {filters.VCpusRange, instanceTypeInfo.VCpuInfo.DefaultVCpus},
|
||||||
|
memoryRange: {filters.MemoryRange, instanceTypeInfo.MemoryInfo.SizeInMiB},
|
||||||
|
gpuMemoryRange: {filters.GpuMemoryRange, getTotalGpuMemory(instanceTypeInfo.GpuInfo)},
|
||||||
|
                gpusRange: {filters.GpusRange, getTotalGpusCount(instanceTypeInfo.GpuInfo)},
                placementGroupStrategy: {filters.PlacementGroupStrategy, instanceTypeInfo.PlacementGroupInfo.SupportedStrategies},
                hypervisor: {filters.Hypervisor, instanceTypeInfo.Hypervisor},
                baremetal: {filters.BareMetal, instanceTypeInfo.BareMetal},
                burstable: {filters.Burstable, instanceTypeInfo.BurstablePerformanceSupported},
                fpga: {filters.Fpga, &isFpga},
                enaSupport: {filters.EnaSupport, supportSyntaxToBool(instanceTypeInfo.NetworkInfo.EnaSupport)},
                vcpusToMemoryRatio: {filters.VCpusToMemoryRatio, calculateVCpusToMemoryRatio(instanceTypeInfo.VCpuInfo.DefaultVCpus, instanceTypeInfo.MemoryInfo.SizeInMiB)},
                currentGeneration: {filters.CurrentGeneration, instanceTypeInfo.CurrentGeneration},
                networkInterfaces: {filters.NetworkInterfaces, instanceTypeInfo.NetworkInfo.MaximumNetworkInterfaces},
                networkPerformance: {filters.NetworkPerformance, getNetworkPerformance(instanceTypeInfo.NetworkInfo.NetworkPerformance)},
            }

            if isInDenyList(filters.DenyList, instanceTypeName) || !isInAllowList(filters.AllowList, instanceTypeName) {
                delete(instanceTypeCandidates, instanceTypeName)
            }

            if !isSupportedInLocation(locationInstanceOfferings, instanceTypeName) {
                delete(instanceTypeCandidates, instanceTypeName)
            }

            var isInstanceSupported bool
            isInstanceSupported, innerErr = itf.executeFilters(filterToInstanceSpecMappingPairs, instanceTypeName)
            if innerErr != nil {
                // stops paging through instance types
                return false
            }
            if !isInstanceSupported {
                delete(instanceTypeCandidates, instanceTypeName)
            }
        }
        // continue paging through instance types
        return true
    })
    if err != nil {
        return nil, err
    }
    if innerErr != nil {
        return nil, innerErr
    }

    instanceTypeInfoSlice := []*ec2.InstanceTypeInfo{}
    for _, instanceTypeInfo := range instanceTypeCandidates {
        instanceTypeInfoSlice = append(instanceTypeInfoSlice, instanceTypeInfo)
    }
    return sortInstanceTypeInfo(instanceTypeInfoSlice), nil
}

// sortInstanceTypeInfo will sort based on instance type info alpha-numerically
func sortInstanceTypeInfo(instanceTypeInfoSlice []*ec2.InstanceTypeInfo) []*ec2.InstanceTypeInfo {
    sort.Slice(instanceTypeInfoSlice, func(i, j int) bool {
        iInstanceInfo := instanceTypeInfoSlice[i]
        jInstanceInfo := instanceTypeInfoSlice[j]
        return strings.Compare(*iInstanceInfo.InstanceType, *jInstanceInfo.InstanceType) <= 0
    })
    return instanceTypeInfoSlice
}

// executeFilters accepts a mapping of filter name to filter pairs which are iterated through
// to determine if the instance type matches the filter values.
func (itf Selector) executeFilters(filterToInstanceSpecMapping map[string]filterPair, instanceType string) (bool, error) {
    for filterName, filterPair := range filterToInstanceSpecMapping {
        filterVal := filterPair.filterValue
        instanceSpec := filterPair.instanceSpec
        // if filter is nil, user did not specify a filter, so skip evaluation
        if reflect.ValueOf(filterVal).IsNil() {
            continue
        }
        instanceSpecType := reflect.ValueOf(instanceSpec).Type()
        filterType := reflect.ValueOf(filterVal).Type()
        filterDetailsMsg := fmt.Sprintf("filter (%s: %s => %s) corresponding to instance spec (%s => %s) for instance type %s", filterName, filterVal, filterType, instanceSpec, instanceSpecType, instanceType)
        invalidInstanceSpecTypeMsg := fmt.Sprintf("Unable to process for %s", filterDetailsMsg)

        // Determine appropriate filter comparator by switching on filter type
        switch filter := filterVal.(type) {
        case *string:
            switch iSpec := instanceSpec.(type) {
            case []*string:
                if !isSupportedFromStrings(iSpec, filter) {
                    return false, nil
                }
            case *string:
                if !isSupportedFromString(iSpec, filter) {
                    return false, nil
                }
            default:
                return false, fmt.Errorf(invalidInstanceSpecTypeMsg)
            }
        case *bool:
            switch iSpec := instanceSpec.(type) {
            case *bool:
                if !isSupportedWithBool(iSpec, filter) {
                    return false, nil
                }
            default:
                return false, fmt.Errorf(invalidInstanceSpecTypeMsg)
            }
        case *IntRangeFilter:
            switch iSpec := instanceSpec.(type) {
            case *int64:
                if !isSupportedWithRangeInt64(iSpec, filter) {
                    return false, nil
                }
            case *int:
                if !isSupportedWithRangeInt(iSpec, filter) {
                    return false, nil
                }
            default:
                return false, fmt.Errorf(invalidInstanceSpecTypeMsg)
            }
        case *ByteQuantityRangeFilter:
            mibRange := Uint64RangeFilter{
                LowerBound: filter.LowerBound.Quantity,
                UpperBound: filter.UpperBound.Quantity,
            }
            switch iSpec := instanceSpec.(type) {
            case *int:
                var iSpec64 *int64
                if iSpec != nil {
                    iSpecVal := int64(*iSpec)
                    iSpec64 = &iSpecVal
                }
                if !isSupportedWithRangeUint64(iSpec64, &mibRange) {
                    return false, nil
                }
            case *int64:
                mibRange := Uint64RangeFilter{
                    LowerBound: filter.LowerBound.Quantity,
                    UpperBound: filter.UpperBound.Quantity,
                }
                if !isSupportedWithRangeUint64(iSpec, &mibRange) {
                    return false, nil
                }
            default:
                return false, fmt.Errorf(invalidInstanceSpecTypeMsg)
            }
        case *float64:
            switch iSpec := instanceSpec.(type) {
            case *float64:
                if !isSupportedWithFloat64(iSpec, filter) {
                    return false, nil
                }
            default:
                return false, fmt.Errorf(invalidInstanceSpecTypeMsg)
            }
        default:
            return false, fmt.Errorf("No filter handler found for %s", filterDetailsMsg)
        }
    }
    return true, nil
}

// RetrieveInstanceTypesSupportedInLocations returns a map of instance type -> AZ or Region for all instance types supported in the intersected locations passed in
// The location can be a zone-id (ie. use1-az1), a zone-name (us-east-1a), or a region name (us-east-1).
// Note that zone names are not necessarily the same across accounts
func (itf Selector) RetrieveInstanceTypesSupportedInLocations(locations []string) (map[string]string, error) {
    if len(locations) == 0 {
        return nil, nil
    }
    availableInstanceTypes := map[string]int{}
    for _, location := range locations {
        instanceTypeOfferingsInput := &ec2.DescribeInstanceTypeOfferingsInput{
            Filters: []*ec2.Filter{
                {
                    Name:   aws.String(locationFilterKey),
                    Values: []*string{aws.String(location)},
                },
            },
        }
        locationType, err := itf.getLocationType(location)
        if err != nil {
            return nil, err
        }
        instanceTypeOfferingsInput.SetLocationType(locationType)

        err = itf.EC2.DescribeInstanceTypeOfferingsPages(instanceTypeOfferingsInput, func(page *ec2.DescribeInstanceTypeOfferingsOutput, lastPage bool) bool {
            for _, instanceType := range page.InstanceTypeOfferings {
                if i, ok := availableInstanceTypes[*instanceType.InstanceType]; !ok {
                    availableInstanceTypes[*instanceType.InstanceType] = 1
                } else {
                    availableInstanceTypes[*instanceType.InstanceType] = i + 1
                }
            }
            return true
        })
        if err != nil {
            return nil, fmt.Errorf("Encountered an error when describing instance type offerings: %w", err)
        }
    }
    availableInstanceTypesAllLocations := map[string]string{}
    for instanceType, locationsSupported := range availableInstanceTypes {
        if locationsSupported == len(locations) {
            availableInstanceTypesAllLocations[instanceType] = ""
        }
    }

    return availableInstanceTypesAllLocations, nil
}

func (itf Selector) getLocationType(location string) (string, error) {
    azs, err := itf.EC2.DescribeAvailabilityZones(&ec2.DescribeAvailabilityZonesInput{})
    if err != nil {
        return "", err
    }
    for _, zone := range azs.AvailabilityZones {
        if location == *zone.RegionName {
            return regionNameLocationType, nil
        } else if location == *zone.ZoneName {
            return zoneNameLocationType, nil
        } else if location == *zone.ZoneId {
            return zoneIDLocationType, nil
        }
    }
    return "", fmt.Errorf("The location passed in (%s) is not a valid zone-id, zone-name, or region name", location)
}

func isSupportedInLocation(instanceOfferings map[string]string, instanceType string) bool {
    if instanceOfferings == nil {
        return true
    }
    _, ok := instanceOfferings[instanceType]
    return ok
}

func isInDenyList(denyRegex *regexp.Regexp, instanceTypeName string) bool {
    if denyRegex == nil {
        return false
    }
    return denyRegex.MatchString(instanceTypeName)
}

func isInAllowList(allowRegex *regexp.Regexp, instanceTypeName string) bool {
    if allowRegex == nil {
        return true
    }
    return allowRegex.MatchString(instanceTypeName)
}
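A note on the dispatch in executeFilters above: filter values are never compared field by field. Each filterPair carries the user's filter and the instance type's spec as interface{} values, a reflection-based nil check skips filters the user never set, and a type switch selects the comparator. The standalone sketch below (illustrative only, not part of the vendored file) reduces that pattern to a single *bool filter.

```go
package main

import (
	"fmt"
	"reflect"
)

// pair mirrors the shape of the vendored filterPair: what the user asked for
// and what the instance type actually provides, both as empty interfaces.
type pair struct {
	filterValue  interface{} // nil pointer means "no preference"
	instanceSpec interface{}
}

func matches(p pair) (bool, error) {
	// A typed nil pointer inside an interface is not == nil, so reflection
	// is used to detect and skip unset filters.
	if reflect.ValueOf(p.filterValue).IsNil() {
		return true, nil
	}
	switch filter := p.filterValue.(type) {
	case *bool:
		spec, ok := p.instanceSpec.(*bool)
		if !ok || spec == nil {
			return false, fmt.Errorf("unable to compare %T against %T", p.filterValue, p.instanceSpec)
		}
		return *spec == *filter, nil
	default:
		return false, fmt.Errorf("no filter handler found for %T", p.filterValue)
	}
}

func main() {
	wantBareMetal := true
	isBareMetal := false
	ok, err := matches(pair{filterValue: &wantBareMetal, instanceSpec: &isBareMetal})
	fmt.Println(ok, err) // false <nil>

	var unset *bool // an unset filter is skipped, so the instance type passes
	ok, err = matches(pair{filterValue: unset, instanceSpec: &isBareMetal})
	fmt.Println(ok, err) // true <nil>
}
```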
182 vendor/github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/types.go generated vendored Normal file
@ -0,0 +1,182 @@
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"). You may
// not use this file except in compliance with the License. A copy of the
// License is located at
//
//     http://aws.amazon.com/apache2.0/
//
// or in the "license" file accompanying this file. This file is distributed
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
// express or implied. See the License for the specific language governing
// permissions and limitations under the License.

package selector

import (
    "encoding/json"
    "regexp"

    "github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity"
    "github.com/aws/aws-sdk-go/service/ec2"
    "github.com/aws/aws-sdk-go/service/ec2/ec2iface"
)

// InstanceTypesOutput can be implemented to provide custom output to instance type results
type InstanceTypesOutput interface {
    Output([]*ec2.InstanceTypeInfo) []string
}

// InstanceTypesOutputFn is the func type definition for InstanceTypesOuput
type InstanceTypesOutputFn func([]*ec2.InstanceTypeInfo) []string

// Output implements InstanceTypesOutput interface on InstanceTypesOutputFn
// This allows any InstanceTypesOutputFn to be passed into funcs accepting InstanceTypesOutput interface
func (fn InstanceTypesOutputFn) Output(instanceTypes []*ec2.InstanceTypeInfo) []string {
    return fn(instanceTypes)
}

// Selector is used to filter instance type resource specs
type Selector struct {
    EC2 ec2iface.EC2API
}

// IntRangeFilter holds an upper and lower bound int
// The lower and upper bound are used to range filter resource specs
type IntRangeFilter struct {
    UpperBound int
    LowerBound int
}

// Uint64RangeFilter holds an upper and lower bound uint64
// The lower and upper bound are used to range filter resource specs
type Uint64RangeFilter struct {
    UpperBound uint64
    LowerBound uint64
}

// ByteQuantityRangeFilter holds an upper and lower bound byte quantity
// The lower and upper bound are used to range filter resource specs
type ByteQuantityRangeFilter struct {
    UpperBound bytequantity.ByteQuantity
    LowerBound bytequantity.ByteQuantity
}

// filterPair holds a tuple of the passed in filter value and the instance resource spec value
type filterPair struct {
    filterValue  interface{}
    instanceSpec interface{}
}

func getRegexpString(r *regexp.Regexp) *string {
    if r == nil {
        return nil
    }
    rStr := r.String()
    return &rStr
}

// MarshalIndent is used to return a pretty-print json representation of a Filters struct
func (f *Filters) MarshalIndent(prefix, indent string) ([]byte, error) {
    type Alias Filters
    return json.MarshalIndent(&struct {
        AllowList *string
        DenyList  *string
        *Alias
    }{
        AllowList: getRegexpString(f.AllowList),
        DenyList:  getRegexpString(f.DenyList),
        Alias:     (*Alias)(f),
    }, prefix, indent)
}

// Filters is used to group instance type resource attributes for filtering
type Filters struct {
    // AvailabilityZones is the AWS Availability Zones where instances will be provisioned.
    // Instance type capacity can vary between availability zones.
    // Will accept zone names or ids
    // Example: us-east-1a, us-east-1b, us-east-2a, etc. OR use1-az1, use2-az2, etc.
    AvailabilityZones *[]string

    // BareMetal is used to only return bare metal instance type results
    BareMetal *bool

    // Burstable is used to only return burstable instance type results like the t* series
    Burstable *bool

    // CPUArchitecture of the EC2 instance type
    // Possible values are: x86_64/amd64 or arm64
    CPUArchitecture *string

    // CurrentGeneration returns the latest generation of instance types
    CurrentGeneration *bool

    // EnaSupport returns instances that can support an Elastic Network Adapter.
    EnaSupport *bool

    // FPGA is used to only return FPGA instance type results
    Fpga *bool

    // GpusRange filter is a range of acceptable GPU count available to an EC2 instance type
    GpusRange *IntRangeFilter

    // GpuMemoryRange filter is a range of acceptable GPU memory in Gibibytes (GiB) available to an EC2 instance type in aggreagte across all GPUs.
    GpuMemoryRange *ByteQuantityRangeFilter

    // HibernationSupported denotes whether EC2 hibernate is supported
    // Possible values are: true or false
    HibernationSupported *bool

    // Hypervisor is used to return only a specific hypervisor backed instance type
    // Possibly values are: xen or nitro
    Hypervisor *string

    // MaxResults is the maximum number of instance types to return that match the filter criteria
    MaxResults *int

    // MemoryRange filter is a range of acceptable DRAM memory in Gibibytes (GiB) for the instance type
    MemoryRange *ByteQuantityRangeFilter

    // NetworkInterfaces filter is a range of the number of ENI attachments an instance type can support
    NetworkInterfaces *IntRangeFilter

    // NetworkPerformance filter is a range of network bandwidth an instance type can support
    NetworkPerformance *IntRangeFilter

    // PlacementGroupStrategy is used to return instance types based on its support
    // for a specific placement group strategy
    // Possible values are: cluster, spread, or partition
    PlacementGroupStrategy *string

    // Region is the AWS Region where instances will be provisioned.
    // Instance type availability can vary between AWS Regions.
    // Example: us-east-1, us-east-2, eu-west-1, etc.
    Region *string

    // RootDeviceType is the backing device of the root storage volume
    // Possible values are: instance-store or ebs
    RootDeviceType *string

    // UsageClass of the instance EC2 instance type
    // Possible values are: spot or on-demand
    UsageClass *string

    // VCpusRange filter is a range of acceptable VCpus for the instance type
    VCpusRange *IntRangeFilter

    // VcpusToMemoryRatio is a ratio of vcpus to memory expressed as a floating point
    VCpusToMemoryRatio *float64

    // AllowList is a regex of allowed instance types
    AllowList *regexp.Regexp

    // DenyList is a regex of excluded instance types
    DenyList *regexp.Regexp

    // InstanceTypeBase is a base instance type which is used to retrieve similarly spec'd instance types
    InstanceTypeBase *string

    // Flexible finds an opinionated set of general (c, m, r, t, a, etc.) instance types that match a criteria specified
    // or defaults to 4 vcpus
    Flexible *bool
}
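The Filters struct above is the public surface callers populate; unset fields stay nil and are skipped by executeFilters. A minimal sketch of constructing one and pretty-printing it with the MarshalIndent helper defined above (illustrative only, not part of the vendored code; it assumes the module is importable under its canonical path):

```go
package main

import (
	"fmt"
	"log"
	"regexp"

	"github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector"
)

func main() {
	vcpus := selector.IntRangeFilter{LowerBound: 2, UpperBound: 4}
	arch := "x86_64"

	filters := selector.Filters{
		VCpusRange:      &vcpus,
		CPUArchitecture: &arch,
		DenyList:        regexp.MustCompile(`^t2\.`), // exclude t2 instance types
	}

	out, err := filters.MarshalIndent("", "  ")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out)) // unset filters are rendered as null
}
```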
@ -1,20 +0,0 @@
package md2man

import (
    "github.com/russross/blackfriday"
)

// Render converts a markdown document into a roff formatted document.
func Render(doc []byte) []byte {
    renderer := RoffRenderer(0)
    extensions := 0
    extensions |= blackfriday.EXTENSION_NO_INTRA_EMPHASIS
    extensions |= blackfriday.EXTENSION_TABLES
    extensions |= blackfriday.EXTENSION_FENCED_CODE
    extensions |= blackfriday.EXTENSION_AUTOLINK
    extensions |= blackfriday.EXTENSION_SPACE_HEADERS
    extensions |= blackfriday.EXTENSION_FOOTNOTES
    extensions |= blackfriday.EXTENSION_TITLEBLOCK

    return blackfriday.Markdown(doc, renderer, extensions)
}
@ -1,285 +0,0 @@
|
||||||
package md2man
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"html"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/russross/blackfriday"
|
|
||||||
)
|
|
||||||
|
|
||||||
type roffRenderer struct {
|
|
||||||
ListCounters []int
|
|
||||||
}
|
|
||||||
|
|
||||||
// RoffRenderer creates a new blackfriday Renderer for generating roff documents
|
|
||||||
// from markdown
|
|
||||||
func RoffRenderer(flags int) blackfriday.Renderer {
|
|
||||||
return &roffRenderer{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) GetFlags() int {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) TitleBlock(out *bytes.Buffer, text []byte) {
|
|
||||||
out.WriteString(".TH ")
|
|
||||||
|
|
||||||
splitText := bytes.Split(text, []byte("\n"))
|
|
||||||
for i, line := range splitText {
|
|
||||||
line = bytes.TrimPrefix(line, []byte("% "))
|
|
||||||
if i == 0 {
|
|
||||||
line = bytes.Replace(line, []byte("("), []byte("\" \""), 1)
|
|
||||||
line = bytes.Replace(line, []byte(")"), []byte("\" \""), 1)
|
|
||||||
}
|
|
||||||
line = append([]byte("\""), line...)
|
|
||||||
line = append(line, []byte("\" ")...)
|
|
||||||
out.Write(line)
|
|
||||||
}
|
|
||||||
out.WriteString("\n")
|
|
||||||
|
|
||||||
// disable hyphenation
|
|
||||||
out.WriteString(".nh\n")
|
|
||||||
// disable justification (adjust text to left margin only)
|
|
||||||
out.WriteString(".ad l\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) BlockCode(out *bytes.Buffer, text []byte, lang string) {
|
|
||||||
out.WriteString("\n.PP\n.RS\n\n.nf\n")
|
|
||||||
escapeSpecialChars(out, text)
|
|
||||||
out.WriteString("\n.fi\n.RE\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) BlockQuote(out *bytes.Buffer, text []byte) {
|
|
||||||
out.WriteString("\n.PP\n.RS\n")
|
|
||||||
out.Write(text)
|
|
||||||
out.WriteString("\n.RE\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) BlockHtml(out *bytes.Buffer, text []byte) { // nolint: golint
|
|
||||||
out.Write(text)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) Header(out *bytes.Buffer, text func() bool, level int, id string) {
|
|
||||||
marker := out.Len()
|
|
||||||
|
|
||||||
switch {
|
|
||||||
case marker == 0:
|
|
||||||
// This is the doc header
|
|
||||||
out.WriteString(".TH ")
|
|
||||||
case level == 1:
|
|
||||||
out.WriteString("\n\n.SH ")
|
|
||||||
case level == 2:
|
|
||||||
out.WriteString("\n.SH ")
|
|
||||||
default:
|
|
||||||
out.WriteString("\n.SS ")
|
|
||||||
}
|
|
||||||
|
|
||||||
if !text() {
|
|
||||||
out.Truncate(marker)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) HRule(out *bytes.Buffer) {
|
|
||||||
out.WriteString("\n.ti 0\n\\l'\\n(.lu'\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) List(out *bytes.Buffer, text func() bool, flags int) {
|
|
||||||
marker := out.Len()
|
|
||||||
r.ListCounters = append(r.ListCounters, 1)
|
|
||||||
out.WriteString("\n.RS\n")
|
|
||||||
if !text() {
|
|
||||||
out.Truncate(marker)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
r.ListCounters = r.ListCounters[:len(r.ListCounters)-1]
|
|
||||||
out.WriteString("\n.RE\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) ListItem(out *bytes.Buffer, text []byte, flags int) {
|
|
||||||
if flags&blackfriday.LIST_TYPE_ORDERED != 0 {
|
|
||||||
out.WriteString(fmt.Sprintf(".IP \"%3d.\" 5\n", r.ListCounters[len(r.ListCounters)-1]))
|
|
||||||
r.ListCounters[len(r.ListCounters)-1]++
|
|
||||||
} else {
|
|
||||||
out.WriteString(".IP \\(bu 2\n")
|
|
||||||
}
|
|
||||||
out.Write(text)
|
|
||||||
out.WriteString("\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) Paragraph(out *bytes.Buffer, text func() bool) {
|
|
||||||
marker := out.Len()
|
|
||||||
out.WriteString("\n.PP\n")
|
|
||||||
if !text() {
|
|
||||||
out.Truncate(marker)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if marker != 0 {
|
|
||||||
out.WriteString("\n")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) Table(out *bytes.Buffer, header []byte, body []byte, columnData []int) {
|
|
||||||
out.WriteString("\n.TS\nallbox;\n")
|
|
||||||
|
|
||||||
maxDelims := 0
|
|
||||||
lines := strings.Split(strings.TrimRight(string(header), "\n")+"\n"+strings.TrimRight(string(body), "\n"), "\n")
|
|
||||||
for _, w := range lines {
|
|
||||||
curDelims := strings.Count(w, "\t")
|
|
||||||
if curDelims > maxDelims {
|
|
||||||
maxDelims = curDelims
|
|
||||||
}
|
|
||||||
}
|
|
||||||
out.Write([]byte(strings.Repeat("l ", maxDelims+1) + "\n"))
|
|
||||||
out.Write([]byte(strings.Repeat("l ", maxDelims+1) + ".\n"))
|
|
||||||
out.Write(header)
|
|
||||||
if len(header) > 0 {
|
|
||||||
out.Write([]byte("\n"))
|
|
||||||
}
|
|
||||||
|
|
||||||
out.Write(body)
|
|
||||||
out.WriteString("\n.TE\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) TableRow(out *bytes.Buffer, text []byte) {
|
|
||||||
if out.Len() > 0 {
|
|
||||||
out.WriteString("\n")
|
|
||||||
}
|
|
||||||
out.Write(text)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) TableHeaderCell(out *bytes.Buffer, text []byte, align int) {
|
|
||||||
if out.Len() > 0 {
|
|
||||||
out.WriteString("\t")
|
|
||||||
}
|
|
||||||
if len(text) == 0 {
|
|
||||||
text = []byte{' '}
|
|
||||||
}
|
|
||||||
out.Write([]byte("\\fB\\fC" + string(text) + "\\fR"))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) TableCell(out *bytes.Buffer, text []byte, align int) {
|
|
||||||
if out.Len() > 0 {
|
|
||||||
out.WriteString("\t")
|
|
||||||
}
|
|
||||||
if len(text) > 30 {
|
|
||||||
text = append([]byte("T{\n"), text...)
|
|
||||||
text = append(text, []byte("\nT}")...)
|
|
||||||
}
|
|
||||||
if len(text) == 0 {
|
|
||||||
text = []byte{' '}
|
|
||||||
}
|
|
||||||
out.Write(text)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) Footnotes(out *bytes.Buffer, text func() bool) {
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) FootnoteItem(out *bytes.Buffer, name, text []byte, flags int) {
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) AutoLink(out *bytes.Buffer, link []byte, kind int) {
|
|
||||||
out.WriteString("\n\\[la]")
|
|
||||||
out.Write(link)
|
|
||||||
out.WriteString("\\[ra]")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) CodeSpan(out *bytes.Buffer, text []byte) {
|
|
||||||
out.WriteString("\\fB\\fC")
|
|
||||||
escapeSpecialChars(out, text)
|
|
||||||
out.WriteString("\\fR")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) DoubleEmphasis(out *bytes.Buffer, text []byte) {
|
|
||||||
out.WriteString("\\fB")
|
|
||||||
out.Write(text)
|
|
||||||
out.WriteString("\\fP")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) Emphasis(out *bytes.Buffer, text []byte) {
|
|
||||||
out.WriteString("\\fI")
|
|
||||||
out.Write(text)
|
|
||||||
out.WriteString("\\fP")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) LineBreak(out *bytes.Buffer) {
|
|
||||||
out.WriteString("\n.br\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) {
|
|
||||||
out.Write(content)
|
|
||||||
r.AutoLink(out, link, 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) RawHtmlTag(out *bytes.Buffer, tag []byte) { // nolint: golint
|
|
||||||
out.Write(tag)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) TripleEmphasis(out *bytes.Buffer, text []byte) {
|
|
||||||
out.WriteString("\\s+2")
|
|
||||||
out.Write(text)
|
|
||||||
out.WriteString("\\s-2")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) StrikeThrough(out *bytes.Buffer, text []byte) {
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) FootnoteRef(out *bytes.Buffer, ref []byte, id int) {
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) Entity(out *bytes.Buffer, entity []byte) {
|
|
||||||
out.WriteString(html.UnescapeString(string(entity)))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) NormalText(out *bytes.Buffer, text []byte) {
|
|
||||||
escapeSpecialChars(out, text)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) DocumentHeader(out *bytes.Buffer) {
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *roffRenderer) DocumentFooter(out *bytes.Buffer) {
|
|
||||||
}
|
|
||||||
|
|
||||||
func needsBackslash(c byte) bool {
|
|
||||||
for _, r := range []byte("-_&\\~") {
|
|
||||||
if c == r {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func escapeSpecialChars(out *bytes.Buffer, text []byte) {
|
|
||||||
for i := 0; i < len(text); i++ {
|
|
||||||
// escape initial apostrophe or period
|
|
||||||
if len(text) >= 1 && (text[0] == '\'' || text[0] == '.') {
|
|
||||||
out.WriteString("\\&")
|
|
||||||
}
|
|
||||||
|
|
||||||
// directly copy normal characters
|
|
||||||
org := i
|
|
||||||
|
|
||||||
for i < len(text) && !needsBackslash(text[i]) {
|
|
||||||
i++
|
|
||||||
}
|
|
||||||
if i > org {
|
|
||||||
out.Write(text[org:i])
|
|
||||||
}
|
|
||||||
|
|
||||||
// escape a character
|
|
||||||
if i >= len(text) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
out.WriteByte('\\')
|
|
||||||
out.WriteByte(text[i])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -6,8 +6,8 @@ go_library(
         "md2man.go",
         "roff.go",
     ],
-    importmap = "k8s.io/kops/vendor/github.com/cpuguy83/go-md2man/md2man",
-    importpath = "github.com/cpuguy83/go-md2man/md2man",
+    importmap = "k8s.io/kops/vendor/github.com/cpuguy83/go-md2man/v2/md2man",
+    importpath = "github.com/cpuguy83/go-md2man/v2/md2man",
     visibility = ["//visibility:public"],
-    deps = ["//vendor/github.com/russross/blackfriday:go_default_library"],
+    deps = ["//vendor/github.com/russross/blackfriday/v2:go_default_library"],
 )
@ -0,0 +1,14 @@
package md2man

import (
    "github.com/russross/blackfriday/v2"
)

// Render converts a markdown document into a roff formatted document.
func Render(doc []byte) []byte {
    renderer := NewRoffRenderer()

    return blackfriday.Run(doc,
        []blackfriday.Option{blackfriday.WithRenderer(renderer),
            blackfriday.WithExtensions(renderer.GetExtensions())}...)
}
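For context, the sketch below (illustrative only, not part of the vendored code) drives the v2 Render shown above directly; the title block on the first line becomes the .TH header of the generated man page. In kops this package is presumably pulled in indirectly through cobra's man-page generation rather than called by hand.

```go
package main

import (
	"fmt"

	"github.com/cpuguy83/go-md2man/v2/md2man"
)

func main() {
	// A tiny markdown man page: a title block followed by a NAME section.
	doc := []byte("% KOPS(1) kops manual\n\n# NAME\n\nkops - Kubernetes Operations\n")
	fmt.Println(string(md2man.Render(doc))) // prints roff markup suitable for man(1)
}
```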
@ -0,0 +1,345 @@
|
||||||
|
package md2man
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/russross/blackfriday/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// roffRenderer implements the blackfriday.Renderer interface for creating
|
||||||
|
// roff format (manpages) from markdown text
|
||||||
|
type roffRenderer struct {
|
||||||
|
extensions blackfriday.Extensions
|
||||||
|
listCounters []int
|
||||||
|
firstHeader bool
|
||||||
|
defineTerm bool
|
||||||
|
listDepth int
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
titleHeader = ".TH "
|
||||||
|
topLevelHeader = "\n\n.SH "
|
||||||
|
secondLevelHdr = "\n.SH "
|
||||||
|
otherHeader = "\n.SS "
|
||||||
|
crTag = "\n"
|
||||||
|
emphTag = "\\fI"
|
||||||
|
emphCloseTag = "\\fP"
|
||||||
|
strongTag = "\\fB"
|
||||||
|
strongCloseTag = "\\fP"
|
||||||
|
breakTag = "\n.br\n"
|
||||||
|
paraTag = "\n.PP\n"
|
||||||
|
hruleTag = "\n.ti 0\n\\l'\\n(.lu'\n"
|
||||||
|
linkTag = "\n\\[la]"
|
||||||
|
linkCloseTag = "\\[ra]"
|
||||||
|
codespanTag = "\\fB\\fC"
|
||||||
|
codespanCloseTag = "\\fR"
|
||||||
|
codeTag = "\n.PP\n.RS\n\n.nf\n"
|
||||||
|
codeCloseTag = "\n.fi\n.RE\n"
|
||||||
|
quoteTag = "\n.PP\n.RS\n"
|
||||||
|
quoteCloseTag = "\n.RE\n"
|
||||||
|
listTag = "\n.RS\n"
|
||||||
|
listCloseTag = "\n.RE\n"
|
||||||
|
arglistTag = "\n.TP\n"
|
||||||
|
tableStart = "\n.TS\nallbox;\n"
|
||||||
|
tableEnd = ".TE\n"
|
||||||
|
tableCellStart = "T{\n"
|
||||||
|
tableCellEnd = "\nT}\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewRoffRenderer creates a new blackfriday Renderer for generating roff documents
|
||||||
|
// from markdown
|
||||||
|
func NewRoffRenderer() *roffRenderer { // nolint: golint
|
||||||
|
var extensions blackfriday.Extensions
|
||||||
|
|
||||||
|
extensions |= blackfriday.NoIntraEmphasis
|
||||||
|
extensions |= blackfriday.Tables
|
||||||
|
extensions |= blackfriday.FencedCode
|
||||||
|
extensions |= blackfriday.SpaceHeadings
|
||||||
|
extensions |= blackfriday.Footnotes
|
||||||
|
extensions |= blackfriday.Titleblock
|
||||||
|
extensions |= blackfriday.DefinitionLists
|
||||||
|
return &roffRenderer{
|
||||||
|
extensions: extensions,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetExtensions returns the list of extensions used by this renderer implementation
|
||||||
|
func (r *roffRenderer) GetExtensions() blackfriday.Extensions {
|
||||||
|
return r.extensions
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderHeader handles outputting the header at document start
|
||||||
|
func (r *roffRenderer) RenderHeader(w io.Writer, ast *blackfriday.Node) {
|
||||||
|
// disable hyphenation
|
||||||
|
out(w, ".nh\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderFooter handles outputting the footer at the document end; the roff
|
||||||
|
// renderer has no footer information
|
||||||
|
func (r *roffRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) {
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderNode is called for each node in a markdown document; based on the node
|
||||||
|
// type the equivalent roff output is sent to the writer
|
||||||
|
func (r *roffRenderer) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
|
||||||
|
|
||||||
|
var walkAction = blackfriday.GoToNext
|
||||||
|
|
||||||
|
switch node.Type {
|
||||||
|
case blackfriday.Text:
|
||||||
|
r.handleText(w, node, entering)
|
||||||
|
case blackfriday.Softbreak:
|
||||||
|
out(w, crTag)
|
||||||
|
case blackfriday.Hardbreak:
|
||||||
|
out(w, breakTag)
|
||||||
|
case blackfriday.Emph:
|
||||||
|
if entering {
|
||||||
|
out(w, emphTag)
|
||||||
|
} else {
|
||||||
|
out(w, emphCloseTag)
|
||||||
|
}
|
||||||
|
case blackfriday.Strong:
|
||||||
|
if entering {
|
||||||
|
out(w, strongTag)
|
||||||
|
} else {
|
||||||
|
out(w, strongCloseTag)
|
||||||
|
}
|
||||||
|
case blackfriday.Link:
|
||||||
|
if !entering {
|
||||||
|
out(w, linkTag+string(node.LinkData.Destination)+linkCloseTag)
|
||||||
|
}
|
||||||
|
case blackfriday.Image:
|
||||||
|
// ignore images
|
||||||
|
walkAction = blackfriday.SkipChildren
|
||||||
|
case blackfriday.Code:
|
||||||
|
out(w, codespanTag)
|
||||||
|
escapeSpecialChars(w, node.Literal)
|
||||||
|
out(w, codespanCloseTag)
|
||||||
|
case blackfriday.Document:
|
||||||
|
break
|
||||||
|
case blackfriday.Paragraph:
|
||||||
|
// roff .PP markers break lists
|
||||||
|
if r.listDepth > 0 {
|
||||||
|
return blackfriday.GoToNext
|
||||||
|
}
|
||||||
|
if entering {
|
||||||
|
out(w, paraTag)
|
||||||
|
} else {
|
||||||
|
out(w, crTag)
|
||||||
|
}
|
||||||
|
case blackfriday.BlockQuote:
|
||||||
|
if entering {
|
||||||
|
out(w, quoteTag)
|
||||||
|
} else {
|
||||||
|
out(w, quoteCloseTag)
|
||||||
|
}
|
||||||
|
case blackfriday.Heading:
|
||||||
|
r.handleHeading(w, node, entering)
|
||||||
|
case blackfriday.HorizontalRule:
|
||||||
|
out(w, hruleTag)
|
||||||
|
case blackfriday.List:
|
||||||
|
r.handleList(w, node, entering)
|
||||||
|
case blackfriday.Item:
|
||||||
|
r.handleItem(w, node, entering)
|
||||||
|
case blackfriday.CodeBlock:
|
||||||
|
out(w, codeTag)
|
||||||
|
escapeSpecialChars(w, node.Literal)
|
||||||
|
out(w, codeCloseTag)
|
||||||
|
case blackfriday.Table:
|
||||||
|
r.handleTable(w, node, entering)
|
||||||
|
case blackfriday.TableCell:
|
||||||
|
r.handleTableCell(w, node, entering)
|
||||||
|
case blackfriday.TableHead:
|
||||||
|
case blackfriday.TableBody:
|
||||||
|
case blackfriday.TableRow:
|
||||||
|
// no action as cell entries do all the nroff formatting
|
||||||
|
return blackfriday.GoToNext
|
||||||
|
default:
|
||||||
|
fmt.Fprintln(os.Stderr, "WARNING: go-md2man does not handle node type "+node.Type.String())
|
||||||
|
}
|
||||||
|
return walkAction
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *roffRenderer) handleText(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||||
|
var (
|
||||||
|
start, end string
|
||||||
|
)
|
||||||
|
// handle special roff table cell text encapsulation
|
||||||
|
if node.Parent.Type == blackfriday.TableCell {
|
||||||
|
if len(node.Literal) > 30 {
|
||||||
|
start = tableCellStart
|
||||||
|
end = tableCellEnd
|
||||||
|
} else {
|
||||||
|
// end rows that aren't terminated by "tableCellEnd" with a cr if end of row
|
||||||
|
if node.Parent.Next == nil && !node.Parent.IsHeader {
|
||||||
|
end = crTag
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out(w, start)
|
||||||
|
escapeSpecialChars(w, node.Literal)
|
||||||
|
out(w, end)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *roffRenderer) handleHeading(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||||
|
if entering {
|
||||||
|
switch node.Level {
|
||||||
|
case 1:
|
||||||
|
if !r.firstHeader {
|
||||||
|
out(w, titleHeader)
|
||||||
|
r.firstHeader = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
out(w, topLevelHeader)
|
||||||
|
case 2:
|
||||||
|
out(w, secondLevelHdr)
|
||||||
|
default:
|
||||||
|
out(w, otherHeader)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *roffRenderer) handleList(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||||
|
openTag := listTag
|
||||||
|
closeTag := listCloseTag
|
||||||
|
if node.ListFlags&blackfriday.ListTypeDefinition != 0 {
|
||||||
|
// tags for definition lists handled within Item node
|
||||||
|
openTag = ""
|
||||||
|
closeTag = ""
|
||||||
|
}
|
||||||
|
if entering {
|
||||||
|
r.listDepth++
|
||||||
|
if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
|
||||||
|
r.listCounters = append(r.listCounters, 1)
|
||||||
|
}
|
||||||
|
out(w, openTag)
|
||||||
|
} else {
|
||||||
|
if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
|
||||||
|
r.listCounters = r.listCounters[:len(r.listCounters)-1]
|
||||||
|
}
|
||||||
|
out(w, closeTag)
|
||||||
|
r.listDepth--
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *roffRenderer) handleItem(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||||
|
if entering {
|
||||||
|
if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
|
||||||
|
out(w, fmt.Sprintf(".IP \"%3d.\" 5\n", r.listCounters[len(r.listCounters)-1]))
|
||||||
|
r.listCounters[len(r.listCounters)-1]++
|
||||||
|
} else if node.ListFlags&blackfriday.ListTypeDefinition != 0 {
|
||||||
|
// state machine for handling terms and following definitions
|
||||||
|
// since blackfriday does not distinguish them properly, nor
|
||||||
|
// does it seperate them into separate lists as it should
|
||||||
|
if !r.defineTerm {
|
||||||
|
out(w, arglistTag)
|
||||||
|
r.defineTerm = true
|
||||||
|
} else {
|
||||||
|
r.defineTerm = false
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
out(w, ".IP \\(bu 2\n")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
out(w, "\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *roffRenderer) handleTable(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||||
|
if entering {
|
||||||
|
out(w, tableStart)
|
||||||
|
//call walker to count cells (and rows?) so format section can be produced
|
||||||
|
columns := countColumns(node)
|
||||||
|
out(w, strings.Repeat("l ", columns)+"\n")
|
||||||
|
out(w, strings.Repeat("l ", columns)+".\n")
|
||||||
|
} else {
|
||||||
|
out(w, tableEnd)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *roffRenderer) handleTableCell(w io.Writer, node *blackfriday.Node, entering bool) {
|
||||||
|
var (
|
||||||
|
start, end string
|
||||||
|
)
|
||||||
|
if node.IsHeader {
|
||||||
|
start = codespanTag
|
||||||
|
end = codespanCloseTag
|
||||||
|
}
|
||||||
|
if entering {
|
||||||
|
if node.Prev != nil && node.Prev.Type == blackfriday.TableCell {
|
||||||
|
out(w, "\t"+start)
|
||||||
|
} else {
|
||||||
|
out(w, start)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// need to carriage return if we are at the end of the header row
|
||||||
|
if node.IsHeader && node.Next == nil {
|
||||||
|
end = end + crTag
|
||||||
|
}
|
||||||
|
out(w, end)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// because roff format requires knowing the column count before outputting any table
|
||||||
|
// data we need to walk a table tree and count the columns
|
||||||
|
func countColumns(node *blackfriday.Node) int {
|
||||||
|
var columns int
|
||||||
|
|
||||||
|
node.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
|
||||||
|
switch node.Type {
|
||||||
|
case blackfriday.TableRow:
|
||||||
|
if !entering {
|
||||||
|
return blackfriday.Terminate
|
||||||
|
}
|
||||||
|
case blackfriday.TableCell:
|
||||||
|
if entering {
|
||||||
|
columns++
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
return blackfriday.GoToNext
|
||||||
|
})
|
||||||
|
return columns
|
||||||
|
}
|
||||||
|
|
||||||
|
func out(w io.Writer, output string) {
|
||||||
|
io.WriteString(w, output) // nolint: errcheck
|
||||||
|
}
|
||||||
|
|
||||||
|
func needsBackslash(c byte) bool {
|
||||||
|
for _, r := range []byte("-_&\\~") {
|
||||||
|
if c == r {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func escapeSpecialChars(w io.Writer, text []byte) {
|
||||||
|
for i := 0; i < len(text); i++ {
|
||||||
|
// escape initial apostrophe or period
|
||||||
|
if len(text) >= 1 && (text[0] == '\'' || text[0] == '.') {
|
||||||
|
out(w, "\\&")
|
||||||
|
}
|
||||||
|
|
||||||
|
// directly copy normal characters
|
||||||
|
org := i
|
||||||
|
|
||||||
|
for i < len(text) && !needsBackslash(text[i]) {
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
if i > org {
|
||||||
|
w.Write(text[org:i]) // nolint: errcheck
|
||||||
|
}
|
||||||
|
|
||||||
|
// escape a character
|
||||||
|
if i >= len(text) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Write([]byte{'\\', text[i]}) // nolint: errcheck
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,8 @@
*.out
*.swp
*.8
*.6
_obj
_test*
markdown
tags
@ -0,0 +1,17 @@
sudo: false
language: go
go:
  - "1.10.x"
  - "1.11.x"
  - tip
matrix:
  fast_finish: true
  allow_failures:
    - go: tip
install:
  - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step).
script:
  - go get -t -v ./...
  - diff -u <(echo -n) <(gofmt -d -s .)
  - go tool vet .
  - go test -v ./...
@ -0,0 +1,19 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = [
        "block.go",
        "doc.go",
        "esc.go",
        "html.go",
        "inline.go",
        "markdown.go",
        "node.go",
        "smartypants.go",
    ],
    importmap = "k8s.io/kops/vendor/github.com/russross/blackfriday/v2",
    importpath = "github.com/russross/blackfriday/v2",
    visibility = ["//visibility:public"],
    deps = ["//vendor/github.com/shurcooL/sanitized_anchor_name:go_default_library"],
)
@ -0,0 +1,29 @@
Blackfriday is distributed under the Simplified BSD License:

> Copyright © 2011 Russ Ross
> All rights reserved.
>
> Redistribution and use in source and binary forms, with or without
> modification, are permitted provided that the following conditions
> are met:
>
> 1.  Redistributions of source code must retain the above copyright
>     notice, this list of conditions and the following disclaimer.
>
> 2.  Redistributions in binary form must reproduce the above
>     copyright notice, this list of conditions and the following
>     disclaimer in the documentation and/or other materials provided with
>     the distribution.
>
> THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
> "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
> LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
> FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
> COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
> INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
> BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
> LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
> CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
> LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
> ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
> POSSIBILITY OF SUCH DAMAGE.
@ -0,0 +1,291 @@
|
||||||
|
Blackfriday [](https://travis-ci.org/russross/blackfriday)
|
||||||
|
===========
|
||||||
|
|
||||||
|
Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It
|
||||||
|
is paranoid about its input (so you can safely feed it user-supplied
|
||||||
|
data), it is fast, it supports common extensions (tables, smart
|
||||||
|
punctuation substitutions, etc.), and it is safe for all utf-8
|
||||||
|
(unicode) input.
|
||||||
|
|
||||||
|
HTML output is currently supported, along with Smartypants
|
||||||
|
extensions.
|
||||||
|
|
||||||
|
It started as a translation from C of [Sundown][3].
|
||||||
|
|
||||||
|
|
||||||
|
Installation
|
||||||
|
------------
|
||||||
|
|
||||||
|
Blackfriday is compatible with any modern Go release. With Go 1.7 and git
|
||||||
|
installed:
|
||||||
|
|
||||||
|
go get gopkg.in/russross/blackfriday.v2
|
||||||
|
|
||||||
|
will download, compile, and install the package into your `$GOPATH`
|
||||||
|
directory hierarchy. Alternatively, you can achieve the same if you
|
||||||
|
import it into a project:
|
||||||
|
|
||||||
|
import "gopkg.in/russross/blackfriday.v2"
|
||||||
|
|
||||||
|
and `go get` without parameters.
|
||||||
|
|
||||||
|
|
||||||
|
Versions
|
||||||
|
--------
|
||||||
|
|
||||||
|
Currently maintained and recommended version of Blackfriday is `v2`. It's being
|
||||||
|
developed on its own branch: https://github.com/russross/blackfriday/tree/v2 and the
|
||||||
|
documentation is available at
|
||||||
|
https://godoc.org/gopkg.in/russross/blackfriday.v2.
|
||||||
|
|
||||||
|
It is `go get`-able via via [gopkg.in][6] at `gopkg.in/russross/blackfriday.v2`,
|
||||||
|
but we highly recommend using package management tool like [dep][7] or
|
||||||
|
[Glide][8] and make use of semantic versioning. With package management you
|
||||||
|
should import `github.com/russross/blackfriday` and specify that you're using
|
||||||
|
version 2.0.0.
|
||||||
|
|
||||||
|
Version 2 offers a number of improvements over v1:
|
||||||
|
|
||||||
|
* Cleaned up API
|
||||||
|
* A separate call to [`Parse`][4], which produces an abstract syntax tree for
|
||||||
|
the document
|
||||||
|
* Latest bug fixes
|
||||||
|
* Flexibility to easily add your own rendering extensions
|
||||||
|
|
||||||
|
Potential drawbacks:
|
||||||
|
|
||||||
|
* Our benchmarks show v2 to be slightly slower than v1. Currently in the
|
||||||
|
ballpark of around 15%.
|
||||||
|
* API breakage. If you can't afford modifying your code to adhere to the new API
|
||||||
|
and don't care too much about the new features, v2 is probably not for you.
|
||||||
|
* Several bug fixes are trailing behind and still need to be forward-ported to
|
||||||
|
v2. See issue [#348](https://github.com/russross/blackfriday/issues/348) for
|
||||||
|
tracking.
|
||||||
|
|
||||||
|
Usage
|
||||||
|
-----
|
||||||
|
|
||||||
|
For the most sensible markdown processing, it is as simple as getting your input
|
||||||
|
into a byte slice and calling:
|
||||||
|
|
||||||
|
```go
|
||||||
|
output := blackfriday.Run(input)
|
||||||
|
```
|
||||||
|
|
||||||
|
Your input will be parsed and the output rendered with a set of most popular
|
||||||
|
extensions enabled. If you want the most basic feature set, corresponding with
|
||||||
|
the bare Markdown specification, use:
|
||||||
|
|
||||||
|
```go
|
||||||
|
output := blackfriday.Run(input, blackfriday.WithNoExtensions())
|
||||||
|
```
|
||||||
|
|
||||||
|
### Sanitize untrusted content
|
||||||
|
|
||||||
|
Blackfriday itself does nothing to protect against malicious content. If you are
|
||||||
|
dealing with user-supplied markdown, we recommend running Blackfriday's output
|
||||||
|
through HTML sanitizer such as [Bluemonday][5].
|
||||||
|
|
||||||
|
Here's an example of simple usage of Blackfriday together with Bluemonday:
|
||||||
|
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"github.com/microcosm-cc/bluemonday"
|
||||||
|
"github.com/russross/blackfriday"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ...
|
||||||
|
unsafe := blackfriday.Run(input)
|
||||||
|
html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Custom options
|
||||||
|
|
||||||
|
If you want to customize the set of options, use `blackfriday.WithExtensions`,
|
||||||
|
`blackfriday.WithRenderer` and `blackfriday.WithRefOverride`.
|
||||||
|
|
||||||
|
You can also check out `blackfriday-tool` for a more complete example
|
||||||
|
of how to use it. Download and install it using:
|
||||||
|
|
||||||
|
go get github.com/russross/blackfriday-tool
|
||||||
|
|
||||||
|
This is a simple command-line tool that allows you to process a
|
||||||
|
markdown file using a standalone program. You can also browse the
|
||||||
|
source directly on github if you are just looking for some example
|
||||||
|
code:
|
||||||
|
|
||||||
|
* <http://github.com/russross/blackfriday-tool>
|
||||||
|
|
||||||
|
Note that if you have not already done so, installing
|
||||||
|
`blackfriday-tool` will be sufficient to download and install
|
||||||
|
blackfriday in addition to the tool itself. The tool binary will be
|
||||||
|
installed in `$GOPATH/bin`. This is a statically-linked binary that
|
||||||
|
can be copied to wherever you need it without worrying about
|
||||||
|
dependencies and library versions.
|
||||||
|
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
All features of Sundown are supported, including:
|
||||||
|
|
||||||
|
* **Compatibility**. The Markdown v1.0.3 test suite passes with
|
||||||
|
the `--tidy` option. Without `--tidy`, the differences are
|
||||||
|
mostly in whitespace and entity escaping, where blackfriday is
|
||||||
|
more consistent and cleaner.
|
||||||
|
|
||||||
|
* **Common extensions**, including table support, fenced code
|
||||||
|
blocks, autolinks, strikethroughs, non-strict emphasis, etc.
|
||||||
|
|
||||||
|
* **Safety**. Blackfriday is paranoid when parsing, making it safe
|
||||||
|
to feed untrusted user input without fear of bad things
|
||||||
|
happening. The test suite stress tests this and there are no
|
||||||
|
known inputs that make it crash. If you find one, please let me
|
||||||
|
know and send me the input that does it.
|
||||||
|
|
||||||
|
NOTE: "safety" in this context means *runtime safety only*. In order to
|
||||||
|
protect yourself against JavaScript injection in untrusted content, see
|
||||||
|
[this example](https://github.com/russross/blackfriday#sanitize-untrusted-content).
|
||||||
|
|
||||||
|
* **Fast processing**. It is fast enough to render on-demand in
|
||||||
|
most web applications without having to cache the output.
|
||||||
|
|
||||||
|
* **Thread safety**. You can run multiple parsers in different
|
||||||
|
goroutines without ill effect. There is no dependence on global
|
||||||
|
shared state.
|
||||||
|
|
||||||
|
* **Minimal dependencies**. Blackfriday only depends on standard
|
||||||
|
library packages in Go. The source code is pretty
|
||||||
|
self-contained, so it is easy to add to any project, including
|
||||||
|
Google App Engine projects.
|
||||||
|
|
||||||
|
* **Standards compliant**. Output successfully validates using the
|
||||||
|
W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional.
|
||||||
|
|
||||||
|
|
||||||
|
Extensions
|
||||||
|
----------
|
||||||
|
|
||||||
|
In addition to the standard markdown syntax, this package
|
||||||
|
implements the following extensions:
|
||||||
|
|
||||||
|
* **Intra-word emphasis supression**. The `_` character is
|
||||||
|
commonly used inside words when discussing code, so having
|
||||||
|
markdown interpret it as an emphasis command is usually the
|
||||||
|
wrong thing. Blackfriday lets you treat all emphasis markers as
|
||||||
|
normal characters when they occur inside a word.
|
||||||
|
|
||||||
|
* **Tables**. Tables can be created by drawing them in the input
|
||||||
|
using a simple syntax:
|
||||||
|
|
||||||
|
```
|
||||||
|
Name | Age
|
||||||
|
--------|------
|
||||||
|
Bob | 27
|
||||||
|
Alice | 23
|
||||||
|
```
|
||||||
|
|
||||||
|
* **Fenced code blocks**. In addition to the normal 4-space
|
||||||
|
indentation to mark code blocks, you can explicitly mark them
|
||||||
|
and supply a language (to make syntax highlighting simple). Just
|
||||||
|
mark it like this:
|
||||||
|
|
||||||
|
```go
|
||||||
|
func getTrue() bool {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You can use 3 or more backticks to mark the beginning of the
|
||||||
|
block, and the same number to mark the end of the block.
|
||||||
|
|
||||||
|
* **Definition lists**. A simple definition list is made of a single-line
|
||||||
|
term followed by a colon and the definition for that term.
|
||||||
|
|
||||||
|
Cat
|
||||||
|
: Fluffy animal everyone likes
|
||||||
|
|
||||||
|
Internet
|
||||||
|
: Vector of transmission for pictures of cats
|
||||||
|
|
||||||
|
Terms must be separated from the previous definition by a blank line.
|
||||||
|
|
||||||
|
* **Footnotes**. A marker in the text that will become a superscript number;
|
||||||
|
a footnote definition that will be placed in a list of footnotes at the
|
||||||
|
end of the document. A footnote looks like this:
|
||||||
|
|
||||||
|
This is a footnote.[^1]
|
||||||
|
|
||||||
|
[^1]: the footnote text.
|
||||||
|
|
||||||
|
* **Autolinking**. Blackfriday can find URLs that have not been
|
||||||
|
explicitly marked as links and turn them into links.
|
||||||
|
|
||||||
|
* **Strikethrough**. Use two tildes (`~~`) to mark text that
|
||||||
|
should be crossed out.
|
||||||
|
|
||||||
|
* **Hard line breaks**. With this extension enabled newlines in the input
|
||||||
|
translate into line breaks in the output. This extension is off by default.
|
||||||
|
|
||||||
|
* **Smart quotes**. Smartypants-style punctuation substitution is
|
||||||
|
supported, turning normal double- and single-quote marks into
|
||||||
|
curly quotes, etc.
|
||||||
|
|
||||||
|
* **LaTeX-style dash parsing** is an additional option, where `--`
|
||||||
|
is translated into `–`, and `---` is translated into
|
||||||
|
`—`. This differs from most smartypants processors, which
|
||||||
|
turn a single hyphen into an ndash and a double hyphen into an
|
||||||
|
mdash.
|
||||||
|
|
||||||
|
* **Smart fractions**, where anything that looks like a fraction
|
||||||
|
is translated into suitable HTML (instead of just a few special
|
||||||
|
cases like most smartypant processors). For example, `4/5`
|
||||||
|
becomes `<sup>4</sup>⁄<sub>5</sub>`, which renders as
|
||||||
|
<sup>4</sup>⁄<sub>5</sub>.
|
||||||
|
|
||||||
|
|
||||||
|
Other renderers
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Blackfriday is structured to allow alternative rendering engines. Here
|
||||||
|
are a few of note:
|
||||||
|
|
||||||
|
* [github_flavored_markdown](https://godoc.org/github.com/shurcooL/github_flavored_markdown):
|
||||||
|
provides a GitHub Flavored Markdown renderer with fenced code block
|
||||||
|
highlighting, clickable heading anchor links.
|
||||||
|
|
||||||
|
It's not customizable, and its goal is to produce HTML output
|
||||||
|
equivalent to the [GitHub Markdown API endpoint](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode),
|
||||||
|
except the rendering is performed locally.
|
||||||
|
|
||||||
|
* [markdownfmt](https://github.com/shurcooL/markdownfmt): like gofmt,
|
||||||
|
but for markdown.
|
||||||
|
|
||||||
|
* [LaTeX output](https://github.com/Ambrevar/Blackfriday-LaTeX):
|
||||||
|
renders output as LaTeX.
|
||||||
|
|
||||||
|
* [Blackfriday-Confluence](https://github.com/kentaro-m/blackfriday-confluence): provides a [Confluence Wiki Markup](https://confluence.atlassian.com/doc/confluence-wiki-markup-251003035.html) renderer.
|
||||||
|
|
||||||
|
|
||||||
|
Todo
|
||||||
|
----
|
||||||
|
|
||||||
|
* More unit testing
|
||||||
|
* Improve unicode support. It does not understand all unicode
|
||||||
|
rules (about what constitutes a letter, a punctuation symbol,
|
||||||
|
etc.), so it may fail to detect word boundaries correctly in
|
||||||
|
some instances. It is safe on all utf-8 input.
|
||||||
|
|
||||||
|
|
||||||
|
License
|
||||||
|
-------
|
||||||
|
|
||||||
|
[Blackfriday is distributed under the Simplified BSD License](LICENSE.txt)
|
||||||
|
|
||||||
|
|
||||||
|
[1]: https://daringfireball.net/projects/markdown/ "Markdown"
|
||||||
|
[2]: https://golang.org/ "Go Language"
|
||||||
|
[3]: https://github.com/vmg/sundown "Sundown"
|
||||||
|
[4]: https://godoc.org/gopkg.in/russross/blackfriday.v2#Parse "Parse func"
|
||||||
|
[5]: https://github.com/microcosm-cc/bluemonday "Bluemonday"
|
||||||
|
[6]: https://labix.org/gopkg.in "gopkg.in"
|
||||||
File diff suppressed because it is too large
@ -0,0 +1,18 @@
// Package blackfriday is a markdown processor.
//
// It translates plain text with simple formatting rules into an AST, which can
// then be further processed to HTML (provided by Blackfriday itself) or other
// formats (provided by the community).
//
// The simplest way to invoke Blackfriday is to call the Run function. It will
// take a text input and produce a text output in HTML (or other format).
//
// A slightly more sophisticated way to use Blackfriday is to create a Markdown
// processor and to call Parse, which returns a syntax tree for the input
// document. You can leverage Blackfriday's parsing for content extraction from
// markdown documents. You can assign a custom renderer and set various options
// to the Markdown processor.
//
// If you're interested in calling Blackfriday from command line, see
// https://github.com/russross/blackfriday-tool.
package blackfriday
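A minimal sketch of the Run entry point the package comment describes (illustrative only, not part of the vendored code): markdown in, HTML out, with the default extension set.

```go
package main

import (
	"fmt"

	"github.com/russross/blackfriday/v2"
)

func main() {
	input := []byte("# Heading\n\nSome *emphasis* and a [link](https://example.com).\n")
	// Run parses the input into an AST and renders it with the built-in HTML renderer.
	html := blackfriday.Run(input)
	fmt.Println(string(html))
}
```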
@ -0,0 +1,34 @@
package blackfriday

import (
    "html"
    "io"
)

var htmlEscaper = [256][]byte{
    '&': []byte("&amp;"),
    '<': []byte("&lt;"),
    '>': []byte("&gt;"),
    '"': []byte("&quot;"),
}

func escapeHTML(w io.Writer, s []byte) {
    var start, end int
    for end < len(s) {
        escSeq := htmlEscaper[s[end]]
        if escSeq != nil {
            w.Write(s[start:end])
            w.Write(escSeq)
            start = end + 1
        }
        end++
    }
    if start < len(s) && end <= len(s) {
        w.Write(s[start:end])
    }
}

func escLink(w io.Writer, text []byte) {
    unesc := html.UnescapeString(string(text))
    escapeHTML(w, []byte(unesc))
}
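escapeHTML above is unexported, so it can only be exercised from inside the package; the standalone sketch below (illustrative only) copies the same table-driven idea to show what the escaper produces for a small input. Only &, <, > and " are rewritten; every other byte is copied through unchanged.

```go
package main

import (
	"bytes"
	"fmt"
)

// escaper mirrors the four entries of the vendored htmlEscaper table.
var escaper = map[byte]string{'&': "&amp;", '<': "&lt;", '>': "&gt;", '"': "&quot;"}

func escape(s []byte) string {
	var buf bytes.Buffer
	for _, c := range s {
		if esc, ok := escaper[c]; ok {
			buf.WriteString(esc)
			continue
		}
		buf.WriteByte(c)
	}
	return buf.String()
}

func main() {
	fmt.Println(escape([]byte(`<a href="x">&co`))) // &lt;a href=&quot;x&quot;&gt;&amp;co
}
```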
@ -0,0 +1 @@
module github.com/russross/blackfriday/v2
@ -0,0 +1,949 @@
//
// Blackfriday Markdown Processor
// Available at http://github.com/russross/blackfriday
//
// Copyright © 2011 Russ Ross <russ@russross.com>.
// Distributed under the Simplified BSD License.
// See README.md for details.
//

//
//
// HTML rendering backend
//
//

package blackfriday

import (
	"bytes"
	"fmt"
	"io"
	"regexp"
	"strings"
)
// HTMLFlags control optional behavior of HTML renderer.
type HTMLFlags int

// HTML renderer configuration options.
const (
	HTMLFlagsNone           HTMLFlags = 0
	SkipHTML                HTMLFlags = 1 << iota // Skip preformatted HTML blocks
	SkipImages              // Skip embedded images
	SkipLinks               // Skip all links
	Safelink                // Only link to trusted protocols
	NofollowLinks           // Only link with rel="nofollow"
	NoreferrerLinks         // Only link with rel="noreferrer"
	NoopenerLinks           // Only link with rel="noopener"
	HrefTargetBlank         // Add a blank target
	CompletePage            // Generate a complete HTML page
	UseXHTML                // Generate XHTML output instead of HTML
	FootnoteReturnLinks     // Generate a link at the end of a footnote to return to the source
	Smartypants             // Enable smart punctuation substitutions
	SmartypantsFractions    // Enable smart fractions (with Smartypants)
	SmartypantsDashes       // Enable smart dashes (with Smartypants)
	SmartypantsLatexDashes  // Enable LaTeX-style dashes (with Smartypants)
	SmartypantsAngledQuotes // Enable angled double quotes (with Smartypants) for double quotes rendering
	SmartypantsQuotesNBSP   // Enable « French guillemets » (with Smartypants)
	TOC                     // Generate a table of contents
)
var (
|
||||||
|
htmlTagRe = regexp.MustCompile("(?i)^" + htmlTag)
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
htmlTag = "(?:" + openTag + "|" + closeTag + "|" + htmlComment + "|" +
|
||||||
|
processingInstruction + "|" + declaration + "|" + cdata + ")"
|
||||||
|
closeTag = "</" + tagName + "\\s*[>]"
|
||||||
|
openTag = "<" + tagName + attribute + "*" + "\\s*/?>"
|
||||||
|
attribute = "(?:" + "\\s+" + attributeName + attributeValueSpec + "?)"
|
||||||
|
attributeValue = "(?:" + unquotedValue + "|" + singleQuotedValue + "|" + doubleQuotedValue + ")"
|
||||||
|
attributeValueSpec = "(?:" + "\\s*=" + "\\s*" + attributeValue + ")"
|
||||||
|
attributeName = "[a-zA-Z_:][a-zA-Z0-9:._-]*"
|
||||||
|
cdata = "<!\\[CDATA\\[[\\s\\S]*?\\]\\]>"
|
||||||
|
declaration = "<![A-Z]+" + "\\s+[^>]*>"
|
||||||
|
doubleQuotedValue = "\"[^\"]*\""
|
||||||
|
htmlComment = "<!---->|<!--(?:-?[^>-])(?:-?[^-])*-->"
|
||||||
|
processingInstruction = "[<][?].*?[?][>]"
|
||||||
|
singleQuotedValue = "'[^']*'"
|
||||||
|
tagName = "[A-Za-z][A-Za-z0-9-]*"
|
||||||
|
unquotedValue = "[^\"'=<>`\\x00-\\x20]+"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HTMLRendererParameters is a collection of supplementary parameters tweaking
// the behavior of various parts of HTML renderer.
type HTMLRendererParameters struct {
	// Prepend this text to each relative URL.
	AbsolutePrefix string
	// Add this text to each footnote anchor, to ensure uniqueness.
	FootnoteAnchorPrefix string
	// Show this text inside the <a> tag for a footnote return link, if the
	// HTML_FOOTNOTE_RETURN_LINKS flag is enabled. If blank, the string
	// <sup>[return]</sup> is used.
	FootnoteReturnLinkContents string
	// If set, add this text to the front of each Heading ID, to ensure
	// uniqueness.
	HeadingIDPrefix string
	// If set, add this text to the back of each Heading ID, to ensure uniqueness.
	HeadingIDSuffix string
	// Increase heading levels: if the offset is 1, <h1> becomes <h2> etc.
	// Negative offset is also valid.
	// Resulting levels are clipped between 1 and 6.
	HeadingLevelOffset int

	Title string // Document title (used if CompletePage is set)
	CSS   string // Optional CSS file URL (used if CompletePage is set)
	Icon  string // Optional icon file URL (used if CompletePage is set)

	Flags HTMLFlags // Flags allow customizing this renderer's behavior
}

// HTMLRenderer is a type that implements the Renderer interface for HTML output.
//
// Do not create this directly, instead use the NewHTMLRenderer function.
type HTMLRenderer struct {
	HTMLRendererParameters

	closeTag string // how to end singleton tags: either " />" or ">"

	// Track heading IDs to prevent ID collision in a single generation.
	headingIDs map[string]int

	lastOutputLen int
	disableTags   int

	sr *SPRenderer
}

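As a minimal sketch of how these parameters are meant to be wired up (illustrative, not part of the vendored file), a caller would fill in HTMLRendererParameters, build the renderer with the NewHTMLRenderer constructor defined below, and hand it to Run via WithRenderer; the prefix values here are made-up assumptions:

```go
package main

import (
	"fmt"

	"github.com/russross/blackfriday/v2"
)

func renderWithTOC(input []byte) []byte {
	r := blackfriday.NewHTMLRenderer(blackfriday.HTMLRendererParameters{
		AbsolutePrefix:  "https://example.com/docs", // assumed prefix for relative URLs
		HeadingIDPrefix: "doc-",                     // assumed namespace for heading IDs
		Flags:           blackfriday.CommonHTMLFlags | blackfriday.TOC,
	})
	return blackfriday.Run(input, blackfriday.WithRenderer(r))
}

func main() {
	out := renderWithTOC([]byte("# Title\n\n## Section\n\nBody text.\n"))
	fmt.Println(string(out))
}
```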
const (
|
||||||
|
xhtmlClose = " />"
|
||||||
|
htmlClose = ">"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewHTMLRenderer creates and configures an HTMLRenderer object, which
|
||||||
|
// satisfies the Renderer interface.
|
||||||
|
func NewHTMLRenderer(params HTMLRendererParameters) *HTMLRenderer {
|
||||||
|
// configure the rendering engine
|
||||||
|
closeTag := htmlClose
|
||||||
|
if params.Flags&UseXHTML != 0 {
|
||||||
|
closeTag = xhtmlClose
|
||||||
|
}
|
||||||
|
|
||||||
|
if params.FootnoteReturnLinkContents == "" {
|
||||||
|
params.FootnoteReturnLinkContents = `<sup>[return]</sup>`
|
||||||
|
}
|
||||||
|
|
||||||
|
return &HTMLRenderer{
|
||||||
|
HTMLRendererParameters: params,
|
||||||
|
|
||||||
|
closeTag: closeTag,
|
||||||
|
headingIDs: make(map[string]int),
|
||||||
|
|
||||||
|
sr: NewSmartypantsRenderer(params.Flags),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func isHTMLTag(tag []byte, tagname string) bool {
|
||||||
|
found, _ := findHTMLTagPos(tag, tagname)
|
||||||
|
return found
|
||||||
|
}
|
||||||
|
|
||||||
|
// Look for a character, but ignore it when it's in any kind of quotes, it
|
||||||
|
// might be JavaScript
|
||||||
|
func skipUntilCharIgnoreQuotes(html []byte, start int, char byte) int {
|
||||||
|
inSingleQuote := false
|
||||||
|
inDoubleQuote := false
|
||||||
|
inGraveQuote := false
|
||||||
|
i := start
|
||||||
|
for i < len(html) {
|
||||||
|
switch {
|
||||||
|
case html[i] == char && !inSingleQuote && !inDoubleQuote && !inGraveQuote:
|
||||||
|
return i
|
||||||
|
case html[i] == '\'':
|
||||||
|
inSingleQuote = !inSingleQuote
|
||||||
|
case html[i] == '"':
|
||||||
|
inDoubleQuote = !inDoubleQuote
|
||||||
|
case html[i] == '`':
|
||||||
|
inGraveQuote = !inGraveQuote
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
return start
|
||||||
|
}
|
||||||
|
|
||||||
|
func findHTMLTagPos(tag []byte, tagname string) (bool, int) {
|
||||||
|
i := 0
|
||||||
|
if i < len(tag) && tag[0] != '<' {
|
||||||
|
return false, -1
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
i = skipSpace(tag, i)
|
||||||
|
|
||||||
|
if i < len(tag) && tag[i] == '/' {
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
i = skipSpace(tag, i)
|
||||||
|
j := 0
|
||||||
|
for ; i < len(tag); i, j = i+1, j+1 {
|
||||||
|
if j >= len(tagname) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.ToLower(string(tag[i]))[0] != tagname[j] {
|
||||||
|
return false, -1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if i == len(tag) {
|
||||||
|
return false, -1
|
||||||
|
}
|
||||||
|
|
||||||
|
rightAngle := skipUntilCharIgnoreQuotes(tag, i, '>')
|
||||||
|
if rightAngle >= i {
|
||||||
|
return true, rightAngle
|
||||||
|
}
|
||||||
|
|
||||||
|
return false, -1
|
||||||
|
}
|
||||||
|
|
||||||
|
func skipSpace(tag []byte, i int) int {
|
||||||
|
for i < len(tag) && isspace(tag[i]) {
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
return i
|
||||||
|
}
|
||||||
|
|
||||||
|
func isRelativeLink(link []byte) (yes bool) {
|
||||||
|
// a tag begin with '#'
|
||||||
|
if link[0] == '#' {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// link begin with '/' but not '//', the second maybe a protocol relative link
|
||||||
|
if len(link) >= 2 && link[0] == '/' && link[1] != '/' {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// only the root '/'
|
||||||
|
if len(link) == 1 && link[0] == '/' {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// current directory : begin with "./"
|
||||||
|
if bytes.HasPrefix(link, []byte("./")) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// parent directory : begin with "../"
|
||||||
|
if bytes.HasPrefix(link, []byte("../")) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HTMLRenderer) ensureUniqueHeadingID(id string) string {
|
||||||
|
for count, found := r.headingIDs[id]; found; count, found = r.headingIDs[id] {
|
||||||
|
tmp := fmt.Sprintf("%s-%d", id, count+1)
|
||||||
|
|
||||||
|
if _, tmpFound := r.headingIDs[tmp]; !tmpFound {
|
||||||
|
r.headingIDs[id] = count + 1
|
||||||
|
id = tmp
|
||||||
|
} else {
|
||||||
|
id = id + "-1"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, found := r.headingIDs[id]; !found {
|
||||||
|
r.headingIDs[id] = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HTMLRenderer) addAbsPrefix(link []byte) []byte {
|
||||||
|
if r.AbsolutePrefix != "" && isRelativeLink(link) && link[0] != '.' {
|
||||||
|
newDest := r.AbsolutePrefix
|
||||||
|
if link[0] != '/' {
|
||||||
|
newDest += "/"
|
||||||
|
}
|
||||||
|
newDest += string(link)
|
||||||
|
return []byte(newDest)
|
||||||
|
}
|
||||||
|
return link
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendLinkAttrs(attrs []string, flags HTMLFlags, link []byte) []string {
|
||||||
|
if isRelativeLink(link) {
|
||||||
|
return attrs
|
||||||
|
}
|
||||||
|
val := []string{}
|
||||||
|
if flags&NofollowLinks != 0 {
|
||||||
|
val = append(val, "nofollow")
|
||||||
|
}
|
||||||
|
if flags&NoreferrerLinks != 0 {
|
||||||
|
val = append(val, "noreferrer")
|
||||||
|
}
|
||||||
|
if flags&NoopenerLinks != 0 {
|
||||||
|
val = append(val, "noopener")
|
||||||
|
}
|
||||||
|
if flags&HrefTargetBlank != 0 {
|
||||||
|
attrs = append(attrs, "target=\"_blank\"")
|
||||||
|
}
|
||||||
|
if len(val) == 0 {
|
||||||
|
return attrs
|
||||||
|
}
|
||||||
|
attr := fmt.Sprintf("rel=%q", strings.Join(val, " "))
|
||||||
|
return append(attrs, attr)
|
||||||
|
}
|
||||||
|
|
||||||
|
func isMailto(link []byte) bool {
|
||||||
|
return bytes.HasPrefix(link, []byte("mailto:"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func needSkipLink(flags HTMLFlags, dest []byte) bool {
|
||||||
|
if flags&SkipLinks != 0 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return flags&Safelink != 0 && !isSafeLink(dest) && !isMailto(dest)
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSmartypantable(node *Node) bool {
|
||||||
|
pt := node.Parent.Type
|
||||||
|
return pt != Link && pt != CodeBlock && pt != Code
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendLanguageAttr(attrs []string, info []byte) []string {
|
||||||
|
if len(info) == 0 {
|
||||||
|
return attrs
|
||||||
|
}
|
||||||
|
endOfLang := bytes.IndexAny(info, "\t ")
|
||||||
|
if endOfLang < 0 {
|
||||||
|
endOfLang = len(info)
|
||||||
|
}
|
||||||
|
return append(attrs, fmt.Sprintf("class=\"language-%s\"", info[:endOfLang]))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HTMLRenderer) tag(w io.Writer, name []byte, attrs []string) {
|
||||||
|
w.Write(name)
|
||||||
|
if len(attrs) > 0 {
|
||||||
|
w.Write(spaceBytes)
|
||||||
|
w.Write([]byte(strings.Join(attrs, " ")))
|
||||||
|
}
|
||||||
|
w.Write(gtBytes)
|
||||||
|
r.lastOutputLen = 1
|
||||||
|
}
|
||||||
|
|
||||||
|
func footnoteRef(prefix string, node *Node) []byte {
|
||||||
|
urlFrag := prefix + string(slugify(node.Destination))
|
||||||
|
anchor := fmt.Sprintf(`<a href="#fn:%s">%d</a>`, urlFrag, node.NoteID)
|
||||||
|
return []byte(fmt.Sprintf(`<sup class="footnote-ref" id="fnref:%s">%s</sup>`, urlFrag, anchor))
|
||||||
|
}
|
||||||
|
|
||||||
|
func footnoteItem(prefix string, slug []byte) []byte {
|
||||||
|
return []byte(fmt.Sprintf(`<li id="fn:%s%s">`, prefix, slug))
|
||||||
|
}
|
||||||
|
|
||||||
|
func footnoteReturnLink(prefix, returnLink string, slug []byte) []byte {
|
||||||
|
const format = ` <a class="footnote-return" href="#fnref:%s%s">%s</a>`
|
||||||
|
return []byte(fmt.Sprintf(format, prefix, slug, returnLink))
|
||||||
|
}
|
||||||
|
|
||||||
|
func itemOpenCR(node *Node) bool {
|
||||||
|
if node.Prev == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
ld := node.Parent.ListData
|
||||||
|
return !ld.Tight && ld.ListFlags&ListTypeDefinition == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func skipParagraphTags(node *Node) bool {
|
||||||
|
grandparent := node.Parent.Parent
|
||||||
|
if grandparent == nil || grandparent.Type != List {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
tightOrTerm := grandparent.Tight || node.Parent.ListFlags&ListTypeTerm != 0
|
||||||
|
return grandparent.Type == List && tightOrTerm
|
||||||
|
}
|
||||||
|
|
||||||
|
func cellAlignment(align CellAlignFlags) string {
|
||||||
|
switch align {
|
||||||
|
case TableAlignmentLeft:
|
||||||
|
return "left"
|
||||||
|
case TableAlignmentRight:
|
||||||
|
return "right"
|
||||||
|
case TableAlignmentCenter:
|
||||||
|
return "center"
|
||||||
|
default:
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HTMLRenderer) out(w io.Writer, text []byte) {
|
||||||
|
if r.disableTags > 0 {
|
||||||
|
w.Write(htmlTagRe.ReplaceAll(text, []byte{}))
|
||||||
|
} else {
|
||||||
|
w.Write(text)
|
||||||
|
}
|
||||||
|
r.lastOutputLen = len(text)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HTMLRenderer) cr(w io.Writer) {
|
||||||
|
if r.lastOutputLen > 0 {
|
||||||
|
r.out(w, nlBytes)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
nlBytes = []byte{'\n'}
|
||||||
|
gtBytes = []byte{'>'}
|
||||||
|
spaceBytes = []byte{' '}
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
brTag = []byte("<br>")
|
||||||
|
brXHTMLTag = []byte("<br />")
|
||||||
|
emTag = []byte("<em>")
|
||||||
|
emCloseTag = []byte("</em>")
|
||||||
|
strongTag = []byte("<strong>")
|
||||||
|
strongCloseTag = []byte("</strong>")
|
||||||
|
delTag = []byte("<del>")
|
||||||
|
delCloseTag = []byte("</del>")
|
||||||
|
ttTag = []byte("<tt>")
|
||||||
|
ttCloseTag = []byte("</tt>")
|
||||||
|
aTag = []byte("<a")
|
||||||
|
aCloseTag = []byte("</a>")
|
||||||
|
preTag = []byte("<pre>")
|
||||||
|
preCloseTag = []byte("</pre>")
|
||||||
|
codeTag = []byte("<code>")
|
||||||
|
codeCloseTag = []byte("</code>")
|
||||||
|
pTag = []byte("<p>")
|
||||||
|
pCloseTag = []byte("</p>")
|
||||||
|
blockquoteTag = []byte("<blockquote>")
|
||||||
|
blockquoteCloseTag = []byte("</blockquote>")
|
||||||
|
hrTag = []byte("<hr>")
|
||||||
|
hrXHTMLTag = []byte("<hr />")
|
||||||
|
ulTag = []byte("<ul>")
|
||||||
|
ulCloseTag = []byte("</ul>")
|
||||||
|
olTag = []byte("<ol>")
|
||||||
|
olCloseTag = []byte("</ol>")
|
||||||
|
dlTag = []byte("<dl>")
|
||||||
|
dlCloseTag = []byte("</dl>")
|
||||||
|
liTag = []byte("<li>")
|
||||||
|
liCloseTag = []byte("</li>")
|
||||||
|
ddTag = []byte("<dd>")
|
||||||
|
ddCloseTag = []byte("</dd>")
|
||||||
|
dtTag = []byte("<dt>")
|
||||||
|
dtCloseTag = []byte("</dt>")
|
||||||
|
tableTag = []byte("<table>")
|
||||||
|
tableCloseTag = []byte("</table>")
|
||||||
|
tdTag = []byte("<td")
|
||||||
|
tdCloseTag = []byte("</td>")
|
||||||
|
thTag = []byte("<th")
|
||||||
|
thCloseTag = []byte("</th>")
|
||||||
|
theadTag = []byte("<thead>")
|
||||||
|
theadCloseTag = []byte("</thead>")
|
||||||
|
tbodyTag = []byte("<tbody>")
|
||||||
|
tbodyCloseTag = []byte("</tbody>")
|
||||||
|
trTag = []byte("<tr>")
|
||||||
|
trCloseTag = []byte("</tr>")
|
||||||
|
h1Tag = []byte("<h1")
|
||||||
|
h1CloseTag = []byte("</h1>")
|
||||||
|
h2Tag = []byte("<h2")
|
||||||
|
h2CloseTag = []byte("</h2>")
|
||||||
|
h3Tag = []byte("<h3")
|
||||||
|
h3CloseTag = []byte("</h3>")
|
||||||
|
h4Tag = []byte("<h4")
|
||||||
|
h4CloseTag = []byte("</h4>")
|
||||||
|
h5Tag = []byte("<h5")
|
||||||
|
h5CloseTag = []byte("</h5>")
|
||||||
|
h6Tag = []byte("<h6")
|
||||||
|
h6CloseTag = []byte("</h6>")
|
||||||
|
|
||||||
|
footnotesDivBytes = []byte("\n<div class=\"footnotes\">\n\n")
|
||||||
|
footnotesCloseDivBytes = []byte("\n</div>\n")
|
||||||
|
)
|
||||||
|
|
||||||
|
func headingTagsFromLevel(level int) ([]byte, []byte) {
|
||||||
|
if level <= 1 {
|
||||||
|
return h1Tag, h1CloseTag
|
||||||
|
}
|
||||||
|
switch level {
|
||||||
|
case 2:
|
||||||
|
return h2Tag, h2CloseTag
|
||||||
|
case 3:
|
||||||
|
return h3Tag, h3CloseTag
|
||||||
|
case 4:
|
||||||
|
return h4Tag, h4CloseTag
|
||||||
|
case 5:
|
||||||
|
return h5Tag, h5CloseTag
|
||||||
|
}
|
||||||
|
return h6Tag, h6CloseTag
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HTMLRenderer) outHRTag(w io.Writer) {
|
||||||
|
if r.Flags&UseXHTML == 0 {
|
||||||
|
r.out(w, hrTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, hrXHTMLTag)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderNode is a default renderer of a single node of a syntax tree. For
|
||||||
|
// block nodes it will be called twice: first time with entering=true, second
|
||||||
|
// time with entering=false, so that it could know when it's working on an open
|
||||||
|
// tag and when on close. It writes the result to w.
|
||||||
|
//
|
||||||
|
// The return value is a way to tell the calling walker to adjust its walk
|
||||||
|
// pattern: e.g. it can terminate the traversal by returning Terminate. Or it
|
||||||
|
// can ask the walker to skip a subtree of this node by returning SkipChildren.
|
||||||
|
// The typical behavior is to return GoToNext, which asks for the usual
|
||||||
|
// traversal to the next node.
|
||||||
|
func (r *HTMLRenderer) RenderNode(w io.Writer, node *Node, entering bool) WalkStatus {
|
||||||
|
attrs := []string{}
|
||||||
|
switch node.Type {
|
||||||
|
case Text:
|
||||||
|
if r.Flags&Smartypants != 0 {
|
||||||
|
var tmp bytes.Buffer
|
||||||
|
escapeHTML(&tmp, node.Literal)
|
||||||
|
r.sr.Process(w, tmp.Bytes())
|
||||||
|
} else {
|
||||||
|
if node.Parent.Type == Link {
|
||||||
|
escLink(w, node.Literal)
|
||||||
|
} else {
|
||||||
|
escapeHTML(w, node.Literal)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case Softbreak:
|
||||||
|
r.cr(w)
|
||||||
|
// TODO: make it configurable via out(renderer.softbreak)
|
||||||
|
case Hardbreak:
|
||||||
|
if r.Flags&UseXHTML == 0 {
|
||||||
|
r.out(w, brTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, brXHTMLTag)
|
||||||
|
}
|
||||||
|
r.cr(w)
|
||||||
|
case Emph:
|
||||||
|
if entering {
|
||||||
|
r.out(w, emTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, emCloseTag)
|
||||||
|
}
|
||||||
|
case Strong:
|
||||||
|
if entering {
|
||||||
|
r.out(w, strongTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, strongCloseTag)
|
||||||
|
}
|
||||||
|
case Del:
|
||||||
|
if entering {
|
||||||
|
r.out(w, delTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, delCloseTag)
|
||||||
|
}
|
||||||
|
case HTMLSpan:
|
||||||
|
if r.Flags&SkipHTML != 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
r.out(w, node.Literal)
|
||||||
|
case Link:
|
||||||
|
// mark it but don't link it if it is not a safe link: no smartypants
|
||||||
|
dest := node.LinkData.Destination
|
||||||
|
if needSkipLink(r.Flags, dest) {
|
||||||
|
if entering {
|
||||||
|
r.out(w, ttTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, ttCloseTag)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if entering {
|
||||||
|
dest = r.addAbsPrefix(dest)
|
||||||
|
var hrefBuf bytes.Buffer
|
||||||
|
hrefBuf.WriteString("href=\"")
|
||||||
|
escLink(&hrefBuf, dest)
|
||||||
|
hrefBuf.WriteByte('"')
|
||||||
|
attrs = append(attrs, hrefBuf.String())
|
||||||
|
if node.NoteID != 0 {
|
||||||
|
r.out(w, footnoteRef(r.FootnoteAnchorPrefix, node))
|
||||||
|
break
|
||||||
|
}
|
||||||
|
attrs = appendLinkAttrs(attrs, r.Flags, dest)
|
||||||
|
if len(node.LinkData.Title) > 0 {
|
||||||
|
var titleBuff bytes.Buffer
|
||||||
|
titleBuff.WriteString("title=\"")
|
||||||
|
escapeHTML(&titleBuff, node.LinkData.Title)
|
||||||
|
titleBuff.WriteByte('"')
|
||||||
|
attrs = append(attrs, titleBuff.String())
|
||||||
|
}
|
||||||
|
r.tag(w, aTag, attrs)
|
||||||
|
} else {
|
||||||
|
if node.NoteID != 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
r.out(w, aCloseTag)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case Image:
|
||||||
|
if r.Flags&SkipImages != 0 {
|
||||||
|
return SkipChildren
|
||||||
|
}
|
||||||
|
if entering {
|
||||||
|
dest := node.LinkData.Destination
|
||||||
|
dest = r.addAbsPrefix(dest)
|
||||||
|
if r.disableTags == 0 {
|
||||||
|
//if options.safe && potentiallyUnsafe(dest) {
|
||||||
|
//out(w, `<img src="" alt="`)
|
||||||
|
//} else {
|
||||||
|
r.out(w, []byte(`<img src="`))
|
||||||
|
escLink(w, dest)
|
||||||
|
r.out(w, []byte(`" alt="`))
|
||||||
|
//}
|
||||||
|
}
|
||||||
|
r.disableTags++
|
||||||
|
} else {
|
||||||
|
r.disableTags--
|
||||||
|
if r.disableTags == 0 {
|
||||||
|
if node.LinkData.Title != nil {
|
||||||
|
r.out(w, []byte(`" title="`))
|
||||||
|
escapeHTML(w, node.LinkData.Title)
|
||||||
|
}
|
||||||
|
r.out(w, []byte(`" />`))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case Code:
|
||||||
|
r.out(w, codeTag)
|
||||||
|
escapeHTML(w, node.Literal)
|
||||||
|
r.out(w, codeCloseTag)
|
||||||
|
case Document:
|
||||||
|
break
|
||||||
|
case Paragraph:
|
||||||
|
if skipParagraphTags(node) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if entering {
|
||||||
|
// TODO: untangle this clusterfuck about when the newlines need
|
||||||
|
// to be added and when not.
|
||||||
|
if node.Prev != nil {
|
||||||
|
switch node.Prev.Type {
|
||||||
|
case HTMLBlock, List, Paragraph, Heading, CodeBlock, BlockQuote, HorizontalRule:
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if node.Parent.Type == BlockQuote && node.Prev == nil {
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
r.out(w, pTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, pCloseTag)
|
||||||
|
if !(node.Parent.Type == Item && node.Next == nil) {
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case BlockQuote:
|
||||||
|
if entering {
|
||||||
|
r.cr(w)
|
||||||
|
r.out(w, blockquoteTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, blockquoteCloseTag)
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
case HTMLBlock:
|
||||||
|
if r.Flags&SkipHTML != 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
r.cr(w)
|
||||||
|
r.out(w, node.Literal)
|
||||||
|
r.cr(w)
|
||||||
|
case Heading:
|
||||||
|
headingLevel := r.HTMLRendererParameters.HeadingLevelOffset + node.Level
|
||||||
|
openTag, closeTag := headingTagsFromLevel(headingLevel)
|
||||||
|
if entering {
|
||||||
|
if node.IsTitleblock {
|
||||||
|
attrs = append(attrs, `class="title"`)
|
||||||
|
}
|
||||||
|
if node.HeadingID != "" {
|
||||||
|
id := r.ensureUniqueHeadingID(node.HeadingID)
|
||||||
|
if r.HeadingIDPrefix != "" {
|
||||||
|
id = r.HeadingIDPrefix + id
|
||||||
|
}
|
||||||
|
if r.HeadingIDSuffix != "" {
|
||||||
|
id = id + r.HeadingIDSuffix
|
||||||
|
}
|
||||||
|
attrs = append(attrs, fmt.Sprintf(`id="%s"`, id))
|
||||||
|
}
|
||||||
|
r.cr(w)
|
||||||
|
r.tag(w, openTag, attrs)
|
||||||
|
} else {
|
||||||
|
r.out(w, closeTag)
|
||||||
|
if !(node.Parent.Type == Item && node.Next == nil) {
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case HorizontalRule:
|
||||||
|
r.cr(w)
|
||||||
|
r.outHRTag(w)
|
||||||
|
r.cr(w)
|
||||||
|
case List:
|
||||||
|
openTag := ulTag
|
||||||
|
closeTag := ulCloseTag
|
||||||
|
if node.ListFlags&ListTypeOrdered != 0 {
|
||||||
|
openTag = olTag
|
||||||
|
closeTag = olCloseTag
|
||||||
|
}
|
||||||
|
if node.ListFlags&ListTypeDefinition != 0 {
|
||||||
|
openTag = dlTag
|
||||||
|
closeTag = dlCloseTag
|
||||||
|
}
|
||||||
|
if entering {
|
||||||
|
if node.IsFootnotesList {
|
||||||
|
r.out(w, footnotesDivBytes)
|
||||||
|
r.outHRTag(w)
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
r.cr(w)
|
||||||
|
if node.Parent.Type == Item && node.Parent.Parent.Tight {
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
r.tag(w, openTag[:len(openTag)-1], attrs)
|
||||||
|
r.cr(w)
|
||||||
|
} else {
|
||||||
|
r.out(w, closeTag)
|
||||||
|
//cr(w)
|
||||||
|
//if node.parent.Type != Item {
|
||||||
|
// cr(w)
|
||||||
|
//}
|
||||||
|
if node.Parent.Type == Item && node.Next != nil {
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
if node.Parent.Type == Document || node.Parent.Type == BlockQuote {
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
if node.IsFootnotesList {
|
||||||
|
r.out(w, footnotesCloseDivBytes)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case Item:
|
||||||
|
openTag := liTag
|
||||||
|
closeTag := liCloseTag
|
||||||
|
if node.ListFlags&ListTypeDefinition != 0 {
|
||||||
|
openTag = ddTag
|
||||||
|
closeTag = ddCloseTag
|
||||||
|
}
|
||||||
|
if node.ListFlags&ListTypeTerm != 0 {
|
||||||
|
openTag = dtTag
|
||||||
|
closeTag = dtCloseTag
|
||||||
|
}
|
||||||
|
if entering {
|
||||||
|
if itemOpenCR(node) {
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
if node.ListData.RefLink != nil {
|
||||||
|
slug := slugify(node.ListData.RefLink)
|
||||||
|
r.out(w, footnoteItem(r.FootnoteAnchorPrefix, slug))
|
||||||
|
break
|
||||||
|
}
|
||||||
|
r.out(w, openTag)
|
||||||
|
} else {
|
||||||
|
if node.ListData.RefLink != nil {
|
||||||
|
slug := slugify(node.ListData.RefLink)
|
||||||
|
if r.Flags&FootnoteReturnLinks != 0 {
|
||||||
|
r.out(w, footnoteReturnLink(r.FootnoteAnchorPrefix, r.FootnoteReturnLinkContents, slug))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
r.out(w, closeTag)
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
case CodeBlock:
|
||||||
|
attrs = appendLanguageAttr(attrs, node.Info)
|
||||||
|
r.cr(w)
|
||||||
|
r.out(w, preTag)
|
||||||
|
r.tag(w, codeTag[:len(codeTag)-1], attrs)
|
||||||
|
escapeHTML(w, node.Literal)
|
||||||
|
r.out(w, codeCloseTag)
|
||||||
|
r.out(w, preCloseTag)
|
||||||
|
if node.Parent.Type != Item {
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
case Table:
|
||||||
|
if entering {
|
||||||
|
r.cr(w)
|
||||||
|
r.out(w, tableTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, tableCloseTag)
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
case TableCell:
|
||||||
|
openTag := tdTag
|
||||||
|
closeTag := tdCloseTag
|
||||||
|
if node.IsHeader {
|
||||||
|
openTag = thTag
|
||||||
|
closeTag = thCloseTag
|
||||||
|
}
|
||||||
|
if entering {
|
||||||
|
align := cellAlignment(node.Align)
|
||||||
|
if align != "" {
|
||||||
|
attrs = append(attrs, fmt.Sprintf(`align="%s"`, align))
|
||||||
|
}
|
||||||
|
if node.Prev == nil {
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
r.tag(w, openTag, attrs)
|
||||||
|
} else {
|
||||||
|
r.out(w, closeTag)
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
case TableHead:
|
||||||
|
if entering {
|
||||||
|
r.cr(w)
|
||||||
|
r.out(w, theadTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, theadCloseTag)
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
case TableBody:
|
||||||
|
if entering {
|
||||||
|
r.cr(w)
|
||||||
|
r.out(w, tbodyTag)
|
||||||
|
// XXX: this is to adhere to a rather silly test. Should fix test.
|
||||||
|
if node.FirstChild == nil {
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
r.out(w, tbodyCloseTag)
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
case TableRow:
|
||||||
|
if entering {
|
||||||
|
r.cr(w)
|
||||||
|
r.out(w, trTag)
|
||||||
|
} else {
|
||||||
|
r.out(w, trCloseTag)
|
||||||
|
r.cr(w)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
panic("Unknown node type " + node.Type.String())
|
||||||
|
}
|
||||||
|
return GoToNext
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderHeader writes HTML document preamble and TOC if requested.
|
||||||
|
func (r *HTMLRenderer) RenderHeader(w io.Writer, ast *Node) {
|
||||||
|
r.writeDocumentHeader(w)
|
||||||
|
if r.Flags&TOC != 0 {
|
||||||
|
r.writeTOC(w, ast)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// RenderFooter writes HTML document footer.
|
||||||
|
func (r *HTMLRenderer) RenderFooter(w io.Writer, ast *Node) {
|
||||||
|
if r.Flags&CompletePage == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
io.WriteString(w, "\n</body>\n</html>\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HTMLRenderer) writeDocumentHeader(w io.Writer) {
|
||||||
|
if r.Flags&CompletePage == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
ending := ""
|
||||||
|
if r.Flags&UseXHTML != 0 {
|
||||||
|
io.WriteString(w, "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" ")
|
||||||
|
io.WriteString(w, "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n")
|
||||||
|
io.WriteString(w, "<html xmlns=\"http://www.w3.org/1999/xhtml\">\n")
|
||||||
|
ending = " /"
|
||||||
|
} else {
|
||||||
|
io.WriteString(w, "<!DOCTYPE html>\n")
|
||||||
|
io.WriteString(w, "<html>\n")
|
||||||
|
}
|
||||||
|
io.WriteString(w, "<head>\n")
|
||||||
|
io.WriteString(w, " <title>")
|
||||||
|
if r.Flags&Smartypants != 0 {
|
||||||
|
r.sr.Process(w, []byte(r.Title))
|
||||||
|
} else {
|
||||||
|
escapeHTML(w, []byte(r.Title))
|
||||||
|
}
|
||||||
|
io.WriteString(w, "</title>\n")
|
||||||
|
io.WriteString(w, " <meta name=\"GENERATOR\" content=\"Blackfriday Markdown Processor v")
|
||||||
|
io.WriteString(w, Version)
|
||||||
|
io.WriteString(w, "\"")
|
||||||
|
io.WriteString(w, ending)
|
||||||
|
io.WriteString(w, ">\n")
|
||||||
|
io.WriteString(w, " <meta charset=\"utf-8\"")
|
||||||
|
io.WriteString(w, ending)
|
||||||
|
io.WriteString(w, ">\n")
|
||||||
|
if r.CSS != "" {
|
||||||
|
io.WriteString(w, " <link rel=\"stylesheet\" type=\"text/css\" href=\"")
|
||||||
|
escapeHTML(w, []byte(r.CSS))
|
||||||
|
io.WriteString(w, "\"")
|
||||||
|
io.WriteString(w, ending)
|
||||||
|
io.WriteString(w, ">\n")
|
||||||
|
}
|
||||||
|
if r.Icon != "" {
|
||||||
|
io.WriteString(w, " <link rel=\"icon\" type=\"image/x-icon\" href=\"")
|
||||||
|
escapeHTML(w, []byte(r.Icon))
|
||||||
|
io.WriteString(w, "\"")
|
||||||
|
io.WriteString(w, ending)
|
||||||
|
io.WriteString(w, ">\n")
|
||||||
|
}
|
||||||
|
io.WriteString(w, "</head>\n")
|
||||||
|
io.WriteString(w, "<body>\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *HTMLRenderer) writeTOC(w io.Writer, ast *Node) {
|
||||||
|
buf := bytes.Buffer{}
|
||||||
|
|
||||||
|
inHeading := false
|
||||||
|
tocLevel := 0
|
||||||
|
headingCount := 0
|
||||||
|
|
||||||
|
ast.Walk(func(node *Node, entering bool) WalkStatus {
|
||||||
|
if node.Type == Heading && !node.HeadingData.IsTitleblock {
|
||||||
|
inHeading = entering
|
||||||
|
if entering {
|
||||||
|
node.HeadingID = fmt.Sprintf("toc_%d", headingCount)
|
||||||
|
if node.Level == tocLevel {
|
||||||
|
buf.WriteString("</li>\n\n<li>")
|
||||||
|
} else if node.Level < tocLevel {
|
||||||
|
for node.Level < tocLevel {
|
||||||
|
tocLevel--
|
||||||
|
buf.WriteString("</li>\n</ul>")
|
||||||
|
}
|
||||||
|
buf.WriteString("</li>\n\n<li>")
|
||||||
|
} else {
|
||||||
|
for node.Level > tocLevel {
|
||||||
|
tocLevel++
|
||||||
|
buf.WriteString("\n<ul>\n<li>")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(&buf, `<a href="#toc_%d">`, headingCount)
|
||||||
|
headingCount++
|
||||||
|
} else {
|
||||||
|
buf.WriteString("</a>")
|
||||||
|
}
|
||||||
|
return GoToNext
|
||||||
|
}
|
||||||
|
|
||||||
|
if inHeading {
|
||||||
|
return r.RenderNode(&buf, node, entering)
|
||||||
|
}
|
||||||
|
|
||||||
|
return GoToNext
|
||||||
|
})
|
||||||
|
|
||||||
|
for ; tocLevel > 0; tocLevel-- {
|
||||||
|
buf.WriteString("</li>\n</ul>")
|
||||||
|
}
|
||||||
|
|
||||||
|
if buf.Len() > 0 {
|
||||||
|
io.WriteString(w, "<nav>\n")
|
||||||
|
w.Write(buf.Bytes())
|
||||||
|
io.WriteString(w, "\n\n</nav>\n")
|
||||||
|
}
|
||||||
|
r.lastOutputLen = buf.Len()
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
@ -0,0 +1,950 @@
// Blackfriday Markdown Processor
// Available at http://github.com/russross/blackfriday
//
// Copyright © 2011 Russ Ross <russ@russross.com>.
// Distributed under the Simplified BSD License.
// See README.md for details.

package blackfriday

import (
	"bytes"
	"fmt"
	"io"
	"strings"
	"unicode/utf8"
)

//
// Markdown parsing and processing
//

// Version string of the package. Appears in the rendered document when
// CompletePage flag is on.
const Version = "2.0"

// Extensions is a bitwise or'ed collection of enabled Blackfriday's
// extensions.
type Extensions int

// These are the supported markdown parsing extensions.
// OR these values together to select multiple extensions.
const (
	NoExtensions           Extensions = 0
	NoIntraEmphasis        Extensions = 1 << iota // Ignore emphasis markers inside words
	Tables                 // Render tables
	FencedCode             // Render fenced code blocks
	Autolink               // Detect embedded URLs that are not explicitly marked
	Strikethrough          // Strikethrough text using ~~test~~
	LaxHTMLBlocks          // Loosen up HTML block parsing rules
	SpaceHeadings          // Be strict about prefix heading rules
	HardLineBreak          // Translate newlines into line breaks
	TabSizeEight           // Expand tabs to eight spaces instead of four
	Footnotes              // Pandoc-style footnotes
	NoEmptyLineBeforeBlock // No need to insert an empty line to start a (code, quote, ordered list, unordered list) block
	HeadingIDs             // specify heading IDs with {#id}
	Titleblock             // Titleblock ala pandoc
	AutoHeadingIDs         // Create the heading ID from the text
	BackslashLineBreak     // Translate trailing backslashes into line breaks
	DefinitionLists        // Render definition lists

	CommonHTMLFlags HTMLFlags = UseXHTML | Smartypants |
		SmartypantsFractions | SmartypantsDashes | SmartypantsLatexDashes

	CommonExtensions Extensions = NoIntraEmphasis | Tables | FencedCode |
		Autolink | Strikethrough | SpaceHeadings | HeadingIDs |
		BackslashLineBreak | DefinitionLists
)
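A small sketch of how these extension bits compose in practice (illustrative, not from this commit): the flags are OR'ed together into an Extensions value and handed to Run through WithExtensions.

```go
package main

import (
	"fmt"

	"github.com/russross/blackfriday/v2"
)

func main() {
	// Start from the common set and additionally switch on footnotes and title blocks.
	exts := blackfriday.CommonExtensions | blackfriday.Footnotes | blackfriday.Titleblock

	input := []byte("Some text with a footnote.[^1]\n\n[^1]: The footnote body.\n")
	fmt.Println(string(blackfriday.Run(input, blackfriday.WithExtensions(exts))))
}
```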
|
||||||
|
// ListType contains bitwise or'ed flags for list and list item objects.
|
||||||
|
type ListType int
|
||||||
|
|
||||||
|
// These are the possible flag values for the ListItem renderer.
|
||||||
|
// Multiple flag values may be ORed together.
|
||||||
|
// These are mostly of interest if you are writing a new output format.
|
||||||
|
const (
|
||||||
|
ListTypeOrdered ListType = 1 << iota
|
||||||
|
ListTypeDefinition
|
||||||
|
ListTypeTerm
|
||||||
|
|
||||||
|
ListItemContainsBlock
|
||||||
|
ListItemBeginningOfList // TODO: figure out if this is of any use now
|
||||||
|
ListItemEndOfList
|
||||||
|
)
|
||||||
|
|
||||||
|
// CellAlignFlags holds a type of alignment in a table cell.
|
||||||
|
type CellAlignFlags int
|
||||||
|
|
||||||
|
// These are the possible flag values for the table cell renderer.
|
||||||
|
// Only a single one of these values will be used; they are not ORed together.
|
||||||
|
// These are mostly of interest if you are writing a new output format.
|
||||||
|
const (
|
||||||
|
TableAlignmentLeft CellAlignFlags = 1 << iota
|
||||||
|
TableAlignmentRight
|
||||||
|
TableAlignmentCenter = (TableAlignmentLeft | TableAlignmentRight)
|
||||||
|
)
|
||||||
|
|
||||||
|
// The size of a tab stop.
|
||||||
|
const (
|
||||||
|
TabSizeDefault = 4
|
||||||
|
TabSizeDouble = 8
|
||||||
|
)
|
||||||
|
|
||||||
|
// blockTags is a set of tags that are recognized as HTML block tags.
|
||||||
|
// Any of these can be included in markdown text without special escaping.
|
||||||
|
var blockTags = map[string]struct{}{
|
||||||
|
"blockquote": {},
|
||||||
|
"del": {},
|
||||||
|
"div": {},
|
||||||
|
"dl": {},
|
||||||
|
"fieldset": {},
|
||||||
|
"form": {},
|
||||||
|
"h1": {},
|
||||||
|
"h2": {},
|
||||||
|
"h3": {},
|
||||||
|
"h4": {},
|
||||||
|
"h5": {},
|
||||||
|
"h6": {},
|
||||||
|
"iframe": {},
|
||||||
|
"ins": {},
|
||||||
|
"math": {},
|
||||||
|
"noscript": {},
|
||||||
|
"ol": {},
|
||||||
|
"pre": {},
|
||||||
|
"p": {},
|
||||||
|
"script": {},
|
||||||
|
"style": {},
|
||||||
|
"table": {},
|
||||||
|
"ul": {},
|
||||||
|
|
||||||
|
// HTML5
|
||||||
|
"address": {},
|
||||||
|
"article": {},
|
||||||
|
"aside": {},
|
||||||
|
"canvas": {},
|
||||||
|
"figcaption": {},
|
||||||
|
"figure": {},
|
||||||
|
"footer": {},
|
||||||
|
"header": {},
|
||||||
|
"hgroup": {},
|
||||||
|
"main": {},
|
||||||
|
"nav": {},
|
||||||
|
"output": {},
|
||||||
|
"progress": {},
|
||||||
|
"section": {},
|
||||||
|
"video": {},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Renderer is the rendering interface. This is mostly of interest if you are
// implementing a new rendering format.
//
// Only an HTML implementation is provided in this repository, see the README
// for external implementations.
type Renderer interface {
	// RenderNode is the main rendering method. It will be called once for
	// every leaf node and twice for every non-leaf node (first with
	// entering=true, then with entering=false). The method should write its
	// rendition of the node to the supplied writer w.
	RenderNode(w io.Writer, node *Node, entering bool) WalkStatus

	// RenderHeader is a method that allows the renderer to produce some
	// content preceding the main body of the output document. The header is
	// understood in the broad sense here. For example, the default HTML
	// renderer will write not only the HTML document preamble, but also the
	// table of contents if it was requested.
	//
	// The method will be passed an entire document tree, in case a particular
	// implementation needs to inspect it to produce output.
	//
	// The output should be written to the supplied writer w. If your
	// implementation has no header to write, supply an empty implementation.
	RenderHeader(w io.Writer, ast *Node)

	// RenderFooter is a symmetric counterpart of RenderHeader.
	RenderFooter(w io.Writer, ast *Node)
}
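To make the interface concrete, here is a minimal sketch of a custom Renderer (illustrative, not part of the vendored sources): it discards all formatting and emits only the text content, which is one simple way to use Blackfriday for content extraction.

```go
package main

import (
	"fmt"
	"io"

	"github.com/russross/blackfriday/v2"
)

// textOnly implements blackfriday.Renderer and keeps only plain text.
type textOnly struct{}

func (textOnly) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
	if entering && node.Type == blackfriday.Text {
		w.Write(node.Literal)
		w.Write([]byte(" "))
	}
	return blackfriday.GoToNext
}

// No document preamble or footer is needed for plain-text output.
func (textOnly) RenderHeader(w io.Writer, ast *blackfriday.Node) {}
func (textOnly) RenderFooter(w io.Writer, ast *blackfriday.Node) {}

func main() {
	input := []byte("Some *emphasised* text and a [link](https://example.com).")
	fmt.Println(string(blackfriday.Run(input, blackfriday.WithRenderer(textOnly{}))))
}
```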
|
||||||
|
// Callback functions for inline parsing. One such function is defined
|
||||||
|
// for each character that triggers a response when parsing inline data.
|
||||||
|
type inlineParser func(p *Markdown, data []byte, offset int) (int, *Node)
|
||||||
|
|
||||||
|
// Markdown is a type that holds extensions and the runtime state used by
|
||||||
|
// Parse, and the renderer. You can not use it directly, construct it with New.
|
||||||
|
type Markdown struct {
|
||||||
|
renderer Renderer
|
||||||
|
referenceOverride ReferenceOverrideFunc
|
||||||
|
refs map[string]*reference
|
||||||
|
inlineCallback [256]inlineParser
|
||||||
|
extensions Extensions
|
||||||
|
nesting int
|
||||||
|
maxNesting int
|
||||||
|
insideLink bool
|
||||||
|
|
||||||
|
// Footnotes need to be ordered as well as available to quickly check for
|
||||||
|
// presence. If a ref is also a footnote, it's stored both in refs and here
|
||||||
|
// in notes. Slice is nil if footnotes not enabled.
|
||||||
|
notes []*reference
|
||||||
|
|
||||||
|
doc *Node
|
||||||
|
tip *Node // = doc
|
||||||
|
oldTip *Node
|
||||||
|
lastMatchedContainer *Node // = doc
|
||||||
|
allClosed bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Markdown) getRef(refid string) (ref *reference, found bool) {
|
||||||
|
if p.referenceOverride != nil {
|
||||||
|
r, overridden := p.referenceOverride(refid)
|
||||||
|
if overridden {
|
||||||
|
if r == nil {
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
return &reference{
|
||||||
|
link: []byte(r.Link),
|
||||||
|
title: []byte(r.Title),
|
||||||
|
noteID: 0,
|
||||||
|
hasBlock: false,
|
||||||
|
text: []byte(r.Text)}, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// refs are case insensitive
|
||||||
|
ref, found = p.refs[strings.ToLower(refid)]
|
||||||
|
return ref, found
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Markdown) finalize(block *Node) {
|
||||||
|
above := block.Parent
|
||||||
|
block.open = false
|
||||||
|
p.tip = above
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Markdown) addChild(node NodeType, offset uint32) *Node {
|
||||||
|
return p.addExistingChild(NewNode(node), offset)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Markdown) addExistingChild(node *Node, offset uint32) *Node {
|
||||||
|
for !p.tip.canContain(node.Type) {
|
||||||
|
p.finalize(p.tip)
|
||||||
|
}
|
||||||
|
p.tip.AppendChild(node)
|
||||||
|
p.tip = node
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Markdown) closeUnmatchedBlocks() {
|
||||||
|
if !p.allClosed {
|
||||||
|
for p.oldTip != p.lastMatchedContainer {
|
||||||
|
parent := p.oldTip.Parent
|
||||||
|
p.finalize(p.oldTip)
|
||||||
|
p.oldTip = parent
|
||||||
|
}
|
||||||
|
p.allClosed = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// Public interface
|
||||||
|
//
|
||||||
|
//
|
||||||
|
|
||||||
|
// Reference represents the details of a link.
|
||||||
|
// See the documentation in Options for more details on use-case.
|
||||||
|
type Reference struct {
|
||||||
|
// Link is usually the URL the reference points to.
|
||||||
|
Link string
|
||||||
|
// Title is the alternate text describing the link in more detail.
|
||||||
|
Title string
|
||||||
|
// Text is the optional text to override the ref with if the syntax used was
|
||||||
|
// [refid][]
|
||||||
|
Text string
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReferenceOverrideFunc is expected to be called with a reference string and
|
||||||
|
// return either a valid Reference type that the reference string maps to or
|
||||||
|
// nil. If overridden is false, the default reference logic will be executed.
|
||||||
|
// See the documentation in Options for more details on use-case.
|
||||||
|
type ReferenceOverrideFunc func(reference string) (ref *Reference, overridden bool)
|
||||||
|
|
||||||
|
// New constructs a Markdown processor. You can use the same With* functions as
|
||||||
|
// for Run() to customize parser's behavior and the renderer.
|
||||||
|
func New(opts ...Option) *Markdown {
|
||||||
|
var p Markdown
|
||||||
|
for _, opt := range opts {
|
||||||
|
opt(&p)
|
||||||
|
}
|
||||||
|
p.refs = make(map[string]*reference)
|
||||||
|
p.maxNesting = 16
|
||||||
|
p.insideLink = false
|
||||||
|
docNode := NewNode(Document)
|
||||||
|
p.doc = docNode
|
||||||
|
p.tip = docNode
|
||||||
|
p.oldTip = docNode
|
||||||
|
p.lastMatchedContainer = docNode
|
||||||
|
p.allClosed = true
|
||||||
|
// register inline parsers
|
||||||
|
p.inlineCallback[' '] = maybeLineBreak
|
||||||
|
p.inlineCallback['*'] = emphasis
|
||||||
|
p.inlineCallback['_'] = emphasis
|
||||||
|
if p.extensions&Strikethrough != 0 {
|
||||||
|
p.inlineCallback['~'] = emphasis
|
||||||
|
}
|
||||||
|
p.inlineCallback['`'] = codeSpan
|
||||||
|
p.inlineCallback['\n'] = lineBreak
|
||||||
|
p.inlineCallback['['] = link
|
||||||
|
p.inlineCallback['<'] = leftAngle
|
||||||
|
p.inlineCallback['\\'] = escape
|
||||||
|
p.inlineCallback['&'] = entity
|
||||||
|
p.inlineCallback['!'] = maybeImage
|
||||||
|
p.inlineCallback['^'] = maybeInlineFootnote
|
||||||
|
if p.extensions&Autolink != 0 {
|
||||||
|
p.inlineCallback['h'] = maybeAutoLink
|
||||||
|
p.inlineCallback['m'] = maybeAutoLink
|
||||||
|
p.inlineCallback['f'] = maybeAutoLink
|
||||||
|
p.inlineCallback['H'] = maybeAutoLink
|
||||||
|
p.inlineCallback['M'] = maybeAutoLink
|
||||||
|
p.inlineCallback['F'] = maybeAutoLink
|
||||||
|
}
|
||||||
|
if p.extensions&Footnotes != 0 {
|
||||||
|
p.notes = make([]*reference, 0)
|
||||||
|
}
|
||||||
|
return &p
|
||||||
|
}
|
||||||
|
|
||||||
|
// Option customizes the Markdown processor's default behavior.
|
||||||
|
type Option func(*Markdown)
|
||||||
|
|
||||||
|
// WithRenderer allows you to override the default renderer.
|
||||||
|
func WithRenderer(r Renderer) Option {
|
||||||
|
return func(p *Markdown) {
|
||||||
|
p.renderer = r
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithExtensions allows you to pick some of the many extensions provided by
|
||||||
|
// Blackfriday. You can bitwise OR them.
|
||||||
|
func WithExtensions(e Extensions) Option {
|
||||||
|
return func(p *Markdown) {
|
||||||
|
p.extensions = e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithNoExtensions turns off all extensions and custom behavior.
|
||||||
|
func WithNoExtensions() Option {
|
||||||
|
return func(p *Markdown) {
|
||||||
|
p.extensions = NoExtensions
|
||||||
|
p.renderer = NewHTMLRenderer(HTMLRendererParameters{
|
||||||
|
Flags: HTMLFlagsNone,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithRefOverride sets an optional function callback that is called every
// time a reference is resolved.
//
// In Markdown, the link reference syntax can be made to resolve a link to
// a reference instead of an inline URL, in one of the following ways:
//
//  * [link text][refid]
//  * [refid][]
//
// Usually, the refid is defined at the bottom of the Markdown document. If
// this override function is provided, the refid is passed to the override
// function first, before consulting the defined refids at the bottom. If
// the override function indicates an override did not occur, the refids at
// the bottom will be used to fill in the link details.
func WithRefOverride(o ReferenceOverrideFunc) Option {
	return func(p *Markdown) {
		p.referenceOverride = o
	}
}
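A hedged sketch of an override callback (the "issue-" refid scheme below is an invented example): refids that look like issue references are resolved to an external URL, and everything else falls through to the definitions in the document.

```go
package main

import (
	"fmt"
	"strings"

	"github.com/russross/blackfriday/v2"
)

func resolveRef(refid string) (*blackfriday.Reference, bool) {
	// Assumed convention: [text][issue-123] links to an external tracker.
	if strings.HasPrefix(refid, "issue-") {
		n := strings.TrimPrefix(refid, "issue-")
		return &blackfriday.Reference{
			Link:  "https://example.com/issues/" + n,
			Title: "Issue " + n,
		}, true
	}
	return nil, false // not overridden; use refids defined in the document
}

func main() {
	input := []byte("See [the bug report][issue-123] for details.")
	fmt.Println(string(blackfriday.Run(input, blackfriday.WithRefOverride(resolveRef))))
}
```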
|
||||||
|
// Run is the main entry point to Blackfriday. It parses and renders a
|
||||||
|
// block of markdown-encoded text.
|
||||||
|
//
|
||||||
|
// The simplest invocation of Run takes one argument, input:
|
||||||
|
// output := Run(input)
|
||||||
|
// This will parse the input with CommonExtensions enabled and render it with
|
||||||
|
// the default HTMLRenderer (with CommonHTMLFlags).
|
||||||
|
//
|
||||||
|
// Variadic arguments opts can customize the default behavior. Since Markdown
|
||||||
|
// type does not contain exported fields, you can not use it directly. Instead,
|
||||||
|
// use the With* functions. For example, this will call the most basic
|
||||||
|
// functionality, with no extensions:
|
||||||
|
// output := Run(input, WithNoExtensions())
|
||||||
|
//
|
||||||
|
// You can use any number of With* arguments, even contradicting ones. They
|
||||||
|
// will be applied in order of appearance and the latter will override the
|
||||||
|
// former:
|
||||||
|
// output := Run(input, WithNoExtensions(), WithExtensions(exts),
|
||||||
|
// WithRenderer(yourRenderer))
|
||||||
|
func Run(input []byte, opts ...Option) []byte {
|
||||||
|
r := NewHTMLRenderer(HTMLRendererParameters{
|
||||||
|
Flags: CommonHTMLFlags,
|
||||||
|
})
|
||||||
|
optList := []Option{WithRenderer(r), WithExtensions(CommonExtensions)}
|
||||||
|
optList = append(optList, opts...)
|
||||||
|
parser := New(optList...)
|
||||||
|
ast := parser.Parse(input)
|
||||||
|
var buf bytes.Buffer
|
||||||
|
parser.renderer.RenderHeader(&buf, ast)
|
||||||
|
ast.Walk(func(node *Node, entering bool) WalkStatus {
|
||||||
|
return parser.renderer.RenderNode(&buf, node, entering)
|
||||||
|
})
|
||||||
|
parser.renderer.RenderFooter(&buf, ast)
|
||||||
|
return buf.Bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse is an entry point to the parsing part of Blackfriday. It takes an
// input markdown document and produces a syntax tree for its contents. This
// tree can then be rendered with a default or custom renderer, or
// analyzed/transformed by the caller to whatever non-standard needs they have.
// The return value is the root node of the syntax tree.
func (p *Markdown) Parse(input []byte) *Node {
	p.block(input)
	// Walk the tree and finish up some of unfinished blocks
	for p.tip != nil {
		p.finalize(p.tip)
	}
	// Walk the tree again and process inline markdown in each block
	p.doc.Walk(func(node *Node, entering bool) WalkStatus {
		if node.Type == Paragraph || node.Type == Heading || node.Type == TableCell {
			p.inline(node, node.content)
			node.content = nil
		}
		return GoToNext
	})
	p.parseRefsToAST()
	return p.doc
}
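As a sketch of the "content extraction" use mentioned in the package documentation (illustrative, not from this commit), Parse can be combined with Node.Walk to pull out, say, the heading texts without rendering anything:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/russross/blackfriday/v2"
)

// headings parses the input and returns the plain text of every heading.
func headings(input []byte) []string {
	md := blackfriday.New(blackfriday.WithExtensions(blackfriday.CommonExtensions))
	ast := md.Parse(input)

	var out []string
	ast.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
		if entering && node.Type == blackfriday.Heading {
			var buf bytes.Buffer
			node.Walk(func(child *blackfriday.Node, childEntering bool) blackfriday.WalkStatus {
				if childEntering && child.Type == blackfriday.Text {
					buf.Write(child.Literal)
				}
				return blackfriday.GoToNext
			})
			out = append(out, buf.String())
		}
		return blackfriday.GoToNext
	})
	return out
}

func main() {
	fmt.Println(headings([]byte("# One\n\ntext\n\n## Two\n\nmore text\n")))
}
```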
|
||||||
|
func (p *Markdown) parseRefsToAST() {
|
||||||
|
if p.extensions&Footnotes == 0 || len(p.notes) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
p.tip = p.doc
|
||||||
|
block := p.addBlock(List, nil)
|
||||||
|
block.IsFootnotesList = true
|
||||||
|
block.ListFlags = ListTypeOrdered
|
||||||
|
flags := ListItemBeginningOfList
|
||||||
|
// Note: this loop is intentionally explicit, not range-form. This is
|
||||||
|
// because the body of the loop will append nested footnotes to p.notes and
|
||||||
|
// we need to process those late additions. Range form would only walk over
|
||||||
|
// the fixed initial set.
|
||||||
|
for i := 0; i < len(p.notes); i++ {
|
||||||
|
ref := p.notes[i]
|
||||||
|
p.addExistingChild(ref.footnote, 0)
|
||||||
|
block := ref.footnote
|
||||||
|
block.ListFlags = flags | ListTypeOrdered
|
||||||
|
block.RefLink = ref.link
|
||||||
|
if ref.hasBlock {
|
||||||
|
flags |= ListItemContainsBlock
|
||||||
|
p.block(ref.title)
|
||||||
|
} else {
|
||||||
|
p.inline(block, ref.title)
|
||||||
|
}
|
||||||
|
flags &^= ListItemBeginningOfList | ListItemContainsBlock
|
||||||
|
}
|
||||||
|
above := block.Parent
|
||||||
|
finalizeList(block)
|
||||||
|
p.tip = above
|
||||||
|
block.Walk(func(node *Node, entering bool) WalkStatus {
|
||||||
|
if node.Type == Paragraph || node.Type == Heading {
|
||||||
|
p.inline(node, node.content)
|
||||||
|
node.content = nil
|
||||||
|
}
|
||||||
|
return GoToNext
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Link references
|
||||||
|
//
|
||||||
|
// This section implements support for references that (usually) appear
|
||||||
|
// as footnotes in a document, and can be referenced anywhere in the document.
|
||||||
|
// The basic format is:
|
||||||
|
//
|
||||||
|
// [1]: http://www.google.com/ "Google"
|
||||||
|
// [2]: http://www.github.com/ "Github"
|
||||||
|
//
|
||||||
|
// Anywhere in the document, the reference can be linked by referring to its
|
||||||
|
// label, i.e., 1 and 2 in this example, as in:
|
||||||
|
//
|
||||||
|
// This library is hosted on [Github][2], a git hosting site.
|
||||||
|
//
|
||||||
|
// Actual footnotes as specified in Pandoc and supported by some other Markdown
|
||||||
|
// libraries such as php-markdown are also taken care of. They look like this:
|
||||||
|
//
|
||||||
|
// This sentence needs a bit of further explanation.[^note]
|
||||||
|
//
|
||||||
|
// [^note]: This is the explanation.
|
||||||
|
//
|
||||||
|
// Footnotes should be placed at the end of the document in an ordered list.
|
||||||
|
// Finally, there are inline footnotes such as:
|
||||||
|
//
|
||||||
|
// Inline footnotes^[Also supported.] provide a quick inline explanation,
|
||||||
|
// but are rendered at the bottom of the document.
|
||||||
|
//
|
||||||
|
|
||||||
|
// reference holds all information necessary for a reference-style links or
|
||||||
|
// footnotes.
|
||||||
|
//
|
||||||
|
// Consider this markdown with reference-style links:
|
||||||
|
//
|
||||||
|
// [link][ref]
|
||||||
|
//
|
||||||
|
// [ref]: /url/ "tooltip title"
|
||||||
|
//
|
||||||
|
// It will be ultimately converted to this HTML:
|
||||||
|
//
|
||||||
|
// <p><a href=\"/url/\" title=\"title\">link</a></p>
|
||||||
|
//
|
||||||
|
// And a reference structure will be populated as follows:
|
||||||
|
//
|
||||||
|
// p.refs["ref"] = &reference{
|
||||||
|
// link: "/url/",
|
||||||
|
// title: "tooltip title",
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Alternatively, reference can contain information about a footnote. Consider
|
||||||
|
// this markdown:
|
||||||
|
//
|
||||||
|
// Text needing a footnote.[^a]
|
||||||
|
//
|
||||||
|
// [^a]: This is the note
|
||||||
|
//
|
||||||
|
// A reference structure will be populated as follows:
|
||||||
|
//
|
||||||
|
// p.refs["a"] = &reference{
|
||||||
|
// link: "a",
|
||||||
|
// title: "This is the note",
|
||||||
|
// noteID: <some positive int>,
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// TODO: As you can see, it begs for splitting into two dedicated structures
|
||||||
|
// for refs and for footnotes.
|
||||||
|
type reference struct {
|
||||||
|
link []byte
|
||||||
|
title []byte
|
||||||
|
noteID int // 0 if not a footnote ref
|
||||||
|
hasBlock bool
|
||||||
|
	footnote *Node // a link to the Item node within a list of footnotes

	text []byte // only gets populated by refOverride feature with Reference.Text
}

func (r *reference) String() string {
	return fmt.Sprintf("{link: %q, title: %q, text: %q, noteID: %d, hasBlock: %v}",
		r.link, r.title, r.text, r.noteID, r.hasBlock)
}

// Check whether or not data starts with a reference link.
// If so, it is parsed and stored in the list of references
// (in the render struct).
// Returns the number of bytes to skip to move past it,
// or zero if the first line is not a reference.
func isReference(p *Markdown, data []byte, tabSize int) int {
	// up to 3 optional leading spaces
	if len(data) < 4 {
		return 0
	}
	i := 0
	for i < 3 && data[i] == ' ' {
		i++
	}

	noteID := 0

	// id part: anything but a newline between brackets
	if data[i] != '[' {
		return 0
	}
	i++
	if p.extensions&Footnotes != 0 {
		if i < len(data) && data[i] == '^' {
			// we can set it to anything here because the proper noteIds will
			// be assigned later during the second pass. It just has to be != 0
			noteID = 1
			i++
		}
	}
	idOffset := i
	for i < len(data) && data[i] != '\n' && data[i] != '\r' && data[i] != ']' {
		i++
	}
	if i >= len(data) || data[i] != ']' {
		return 0
	}
	idEnd := i
	// footnotes can have empty ID, like this: [^], but a reference can not be
	// empty like this: []. Break early if it's not a footnote and there's no ID
	if noteID == 0 && idOffset == idEnd {
		return 0
	}
	// spacer: colon (space | tab)* newline? (space | tab)*
	i++
	if i >= len(data) || data[i] != ':' {
		return 0
	}
	i++
	for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
		i++
	}
	if i < len(data) && (data[i] == '\n' || data[i] == '\r') {
		i++
		if i < len(data) && data[i] == '\n' && data[i-1] == '\r' {
			i++
		}
	}
	for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
		i++
	}
	if i >= len(data) {
		return 0
	}

	var (
		linkOffset, linkEnd   int
		titleOffset, titleEnd int
		lineEnd               int
		raw                   []byte
		hasBlock              bool
	)

	if p.extensions&Footnotes != 0 && noteID != 0 {
		linkOffset, linkEnd, raw, hasBlock = scanFootnote(p, data, i, tabSize)
		lineEnd = linkEnd
	} else {
		linkOffset, linkEnd, titleOffset, titleEnd, lineEnd = scanLinkRef(p, data, i)
	}
	if lineEnd == 0 {
		return 0
	}

	// a valid ref has been found

	ref := &reference{
		noteID:   noteID,
		hasBlock: hasBlock,
	}

	if noteID > 0 {
		// reusing the link field for the id since footnotes don't have links
		ref.link = data[idOffset:idEnd]
		// if footnote, it's not really a title, it's the contained text
		ref.title = raw
	} else {
		ref.link = data[linkOffset:linkEnd]
		ref.title = data[titleOffset:titleEnd]
	}

	// id matches are case-insensitive
	id := string(bytes.ToLower(data[idOffset:idEnd]))

	p.refs[id] = ref

	return lineEnd
}
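
The reference scanner above is what lets reference-style link and footnote definitions resolve during rendering. A minimal, illustrative sketch of exercising it through the public API (the import path is assumed to match the vendored v2 module, and the URL is a placeholder):

package main

import (
	"fmt"

	"github.com/russross/blackfriday/v2"
)

func main() {
	// The second paragraph is a reference definition; isReference records it
	// during the first pass, so the [docs][ref] link resolves on render.
	input := []byte("See the [docs][ref].\n\n[ref]: https://example.com \"Example docs\"\n")
	fmt.Println(string(blackfriday.Run(input)))
}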

func scanLinkRef(p *Markdown, data []byte, i int) (linkOffset, linkEnd, titleOffset, titleEnd, lineEnd int) {
	// link: whitespace-free sequence, optionally between angle brackets
	if data[i] == '<' {
		i++
	}
	linkOffset = i
	for i < len(data) && data[i] != ' ' && data[i] != '\t' && data[i] != '\n' && data[i] != '\r' {
		i++
	}
	linkEnd = i
	if data[linkOffset] == '<' && data[linkEnd-1] == '>' {
		linkOffset++
		linkEnd--
	}

	// optional spacer: (space | tab)* (newline | '\'' | '"' | '(' )
	for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
		i++
	}
	if i < len(data) && data[i] != '\n' && data[i] != '\r' && data[i] != '\'' && data[i] != '"' && data[i] != '(' {
		return
	}

	// compute end-of-line
	if i >= len(data) || data[i] == '\r' || data[i] == '\n' {
		lineEnd = i
	}
	if i+1 < len(data) && data[i] == '\r' && data[i+1] == '\n' {
		lineEnd++
	}

	// optional (space|tab)* spacer after a newline
	if lineEnd > 0 {
		i = lineEnd + 1
		for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
			i++
		}
	}

	// optional title: any non-newline sequence enclosed in '"() alone on its line
	if i+1 < len(data) && (data[i] == '\'' || data[i] == '"' || data[i] == '(') {
		i++
		titleOffset = i

		// look for EOL
		for i < len(data) && data[i] != '\n' && data[i] != '\r' {
			i++
		}
		if i+1 < len(data) && data[i] == '\n' && data[i+1] == '\r' {
			titleEnd = i + 1
		} else {
			titleEnd = i
		}

		// step back
		i--
		for i > titleOffset && (data[i] == ' ' || data[i] == '\t') {
			i--
		}
		if i > titleOffset && (data[i] == '\'' || data[i] == '"' || data[i] == ')') {
			lineEnd = titleEnd
			titleEnd = i
		}
	}

	return
}

// The first bit of this logic is the same as Parser.listItem, but the rest
// is much simpler. This function simply finds the entire block and shifts it
// over by one tab if it is indeed a block (just returns the line if it's not).
// blockEnd is the end of the section in the input buffer, and contents is the
// extracted text that was shifted over one tab. It will need to be rendered at
// the end of the document.
func scanFootnote(p *Markdown, data []byte, i, indentSize int) (blockStart, blockEnd int, contents []byte, hasBlock bool) {
	if i == 0 || len(data) == 0 {
		return
	}

	// skip leading whitespace on first line
	for i < len(data) && data[i] == ' ' {
		i++
	}

	blockStart = i

	// find the end of the line
	blockEnd = i
	for i < len(data) && data[i-1] != '\n' {
		i++
	}

	// get working buffer
	var raw bytes.Buffer

	// put the first line into the working buffer
	raw.Write(data[blockEnd:i])
	blockEnd = i

	// process the following lines
	containsBlankLine := false

gatherLines:
	for blockEnd < len(data) {
		i++

		// find the end of this line
		for i < len(data) && data[i-1] != '\n' {
			i++
		}

		// if it is an empty line, guess that it is part of this item
		// and move on to the next line
		if p.isEmpty(data[blockEnd:i]) > 0 {
			containsBlankLine = true
			blockEnd = i
			continue
		}

		n := 0
		if n = isIndented(data[blockEnd:i], indentSize); n == 0 {
			// this is the end of the block.
			// we don't want to include this last line in the index.
			break gatherLines
		}

		// if there were blank lines before this one, insert a new one now
		if containsBlankLine {
			raw.WriteByte('\n')
			containsBlankLine = false
		}

		// get rid of that first tab, write to buffer
		raw.Write(data[blockEnd+n : i])
		hasBlock = true

		blockEnd = i
	}

	if data[blockEnd-1] != '\n' {
		raw.WriteByte('\n')
	}

	contents = raw.Bytes()

	return
}

//
//
// Miscellaneous helper functions
//
//

// Test if a character is a punctuation symbol.
// Taken from a private function in regexp in the stdlib.
func ispunct(c byte) bool {
	for _, r := range []byte("!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~") {
		if c == r {
			return true
		}
	}
	return false
}

// Test if a character is a whitespace character.
func isspace(c byte) bool {
	return ishorizontalspace(c) || isverticalspace(c)
}

// Test if a character is a horizontal whitespace character.
func ishorizontalspace(c byte) bool {
	return c == ' ' || c == '\t'
}

// Test if a character is a vertical character.
func isverticalspace(c byte) bool {
	return c == '\n' || c == '\r' || c == '\f' || c == '\v'
}

// Test if a character is letter.
func isletter(c byte) bool {
	return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
}

// Test if a character is a letter or a digit.
// TODO: check when this is looking for ASCII alnum and when it should use unicode
func isalnum(c byte) bool {
	return (c >= '0' && c <= '9') || isletter(c)
}

// Replace tab characters with spaces, aligning to the next TAB_SIZE column.
// always ends output with a newline
func expandTabs(out *bytes.Buffer, line []byte, tabSize int) {
	// first, check for common cases: no tabs, or only tabs at beginning of line
	i, prefix := 0, 0
	slowcase := false
	for i = 0; i < len(line); i++ {
		if line[i] == '\t' {
			if prefix == i {
				prefix++
			} else {
				slowcase = true
				break
			}
		}
	}

	// no need to decode runes if all tabs are at the beginning of the line
	if !slowcase {
		for i = 0; i < prefix*tabSize; i++ {
			out.WriteByte(' ')
		}
		out.Write(line[prefix:])
		return
	}

	// the slow case: we need to count runes to figure out how
	// many spaces to insert for each tab
	column := 0
	i = 0
	for i < len(line) {
		start := i
		for i < len(line) && line[i] != '\t' {
			_, size := utf8.DecodeRune(line[i:])
			i += size
			column++
		}

		if i > start {
			out.Write(line[start:i])
		}

		if i >= len(line) {
			break
		}

		for {
			out.WriteByte(' ')
			column++
			if column%tabSize == 0 {
				break
			}
		}

		i++
	}
}

// Find if a line counts as indented or not.
// Returns number of characters the indent is (0 = not indented).
func isIndented(data []byte, indentSize int) int {
	if len(data) == 0 {
		return 0
	}
	if data[0] == '\t' {
		return 1
	}
	if len(data) < indentSize {
		return 0
	}
	for i := 0; i < indentSize; i++ {
		if data[i] != ' ' {
			return 0
		}
	}
	return indentSize
}

// Create a url-safe slug for fragments
func slugify(in []byte) []byte {
	if len(in) == 0 {
		return in
	}
	out := make([]byte, 0, len(in))
	sym := false

	for _, ch := range in {
		if isalnum(ch) {
			sym = false
			out = append(out, ch)
		} else if sym {
			continue
		} else {
			out = append(out, '-')
			sym = true
		}
	}
	var a, b int
	var ch byte
	for a, ch = range out {
		if ch != '-' {
			break
		}
	}
	for b = len(out) - 1; b > 0; b-- {
		if out[b] != '-' {
			break
		}
	}
	return out[a : b+1]
}
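
These helpers are unexported, so the quickest way to sanity-check them is a test in the same package. A rough, hypothetical test sketch (the file, test name, and cases are illustrative and not part of the vendored code):

package blackfriday

import (
	"bytes"
	"testing"
)

func TestHelperSketch(t *testing.T) {
	// slugify keeps alphanumerics, collapses runs of everything else into a
	// single '-', and trims leading/trailing dashes; it does not lowercase.
	if got := string(slugify([]byte("Hello, World!"))); got != "Hello-World" {
		t.Errorf("slugify: got %q", got)
	}

	// isIndented reports the width of a tab or indentSize-space indent, 0 otherwise.
	if isIndented([]byte("    code"), 4) != 4 || isIndented([]byte("code"), 4) != 0 {
		t.Error("isIndented: unexpected result")
	}

	// expandTabs pads each tab out to the next tabSize column.
	var buf bytes.Buffer
	expandTabs(&buf, []byte("a\tb"), 4)
	if buf.String() != "a   b" {
		t.Errorf("expandTabs: got %q", buf.String())
	}
}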
@@ -0,0 +1,354 @@
package blackfriday

import (
	"bytes"
	"fmt"
)

// NodeType specifies a type of a single node of a syntax tree. Usually one
// node (and its type) corresponds to a single markdown feature, e.g. emphasis
// or code block.
type NodeType int

// Constants for identifying different types of nodes. See NodeType.
const (
	Document NodeType = iota
	BlockQuote
	List
	Item
	Paragraph
	Heading
	HorizontalRule
	Emph
	Strong
	Del
	Link
	Image
	Text
	HTMLBlock
	CodeBlock
	Softbreak
	Hardbreak
	Code
	HTMLSpan
	Table
	TableCell
	TableHead
	TableBody
	TableRow
)

var nodeTypeNames = []string{
	Document:       "Document",
	BlockQuote:     "BlockQuote",
	List:           "List",
	Item:           "Item",
	Paragraph:      "Paragraph",
	Heading:        "Heading",
	HorizontalRule: "HorizontalRule",
	Emph:           "Emph",
	Strong:         "Strong",
	Del:            "Del",
	Link:           "Link",
	Image:          "Image",
	Text:           "Text",
	HTMLBlock:      "HTMLBlock",
	CodeBlock:      "CodeBlock",
	Softbreak:      "Softbreak",
	Hardbreak:      "Hardbreak",
	Code:           "Code",
	HTMLSpan:       "HTMLSpan",
	Table:          "Table",
	TableCell:      "TableCell",
	TableHead:      "TableHead",
	TableBody:      "TableBody",
	TableRow:       "TableRow",
}

func (t NodeType) String() string {
	return nodeTypeNames[t]
}

// ListData contains fields relevant to a List and Item node type.
type ListData struct {
	ListFlags       ListType
	Tight           bool   // Skip <p>s around list item data if true
	BulletChar      byte   // '*', '+' or '-' in bullet lists
	Delimiter       byte   // '.' or ')' after the number in ordered lists
	RefLink         []byte // If not nil, turns this list item into a footnote item and triggers different rendering
	IsFootnotesList bool   // This is a list of footnotes
}

// LinkData contains fields relevant to a Link node type.
type LinkData struct {
	Destination []byte // Destination is what goes into a href
	Title       []byte // Title is the tooltip thing that goes in a title attribute
	NoteID      int    // NoteID contains a serial number of a footnote, zero if it's not a footnote
	Footnote    *Node  // If it's a footnote, this is a direct link to the footnote Node. Otherwise nil.
}

// CodeBlockData contains fields relevant to a CodeBlock node type.
type CodeBlockData struct {
	IsFenced    bool   // Specifies whether it's a fenced code block or an indented one
	Info        []byte // This holds the info string
	FenceChar   byte
	FenceLength int
	FenceOffset int
}

// TableCellData contains fields relevant to a TableCell node type.
type TableCellData struct {
	IsHeader bool           // This tells if it's under the header row
	Align    CellAlignFlags // This holds the value for align attribute
}

// HeadingData contains fields relevant to a Heading node type.
type HeadingData struct {
	Level        int    // This holds the heading level number
	HeadingID    string // This might hold heading ID, if present
	IsTitleblock bool   // Specifies whether it's a title block
}

// Node is a single element in the abstract syntax tree of the parsed document.
// It holds connections to the structurally neighboring nodes and, for certain
// types of nodes, additional information that might be needed when rendering.
type Node struct {
	Type       NodeType // Determines the type of the node
	Parent     *Node    // Points to the parent
	FirstChild *Node    // Points to the first child, if any
	LastChild  *Node    // Points to the last child, if any
	Prev       *Node    // Previous sibling; nil if it's the first child
	Next       *Node    // Next sibling; nil if it's the last child

	Literal []byte // Text contents of the leaf nodes

	HeadingData   // Populated if Type is Heading
	ListData      // Populated if Type is List
	CodeBlockData // Populated if Type is CodeBlock
	LinkData      // Populated if Type is Link
	TableCellData // Populated if Type is TableCell

	content []byte // Markdown content of the block nodes
	open    bool   // Specifies an open block node that has not been finished to process yet
}

// NewNode allocates a node of a specified type.
func NewNode(typ NodeType) *Node {
	return &Node{
		Type: typ,
		open: true,
	}
}

func (n *Node) String() string {
	ellipsis := ""
	snippet := n.Literal
	if len(snippet) > 16 {
		snippet = snippet[:16]
		ellipsis = "..."
	}
	return fmt.Sprintf("%s: '%s%s'", n.Type, snippet, ellipsis)
}

// Unlink removes node 'n' from the tree.
// It panics if the node is nil.
func (n *Node) Unlink() {
	if n.Prev != nil {
		n.Prev.Next = n.Next
	} else if n.Parent != nil {
		n.Parent.FirstChild = n.Next
	}
	if n.Next != nil {
		n.Next.Prev = n.Prev
	} else if n.Parent != nil {
		n.Parent.LastChild = n.Prev
	}
	n.Parent = nil
	n.Next = nil
	n.Prev = nil
}

// AppendChild adds a node 'child' as a child of 'n'.
// It panics if either node is nil.
func (n *Node) AppendChild(child *Node) {
	child.Unlink()
	child.Parent = n
	if n.LastChild != nil {
		n.LastChild.Next = child
		child.Prev = n.LastChild
		n.LastChild = child
	} else {
		n.FirstChild = child
		n.LastChild = child
	}
}

// InsertBefore inserts 'sibling' immediately before 'n'.
// It panics if either node is nil.
func (n *Node) InsertBefore(sibling *Node) {
	sibling.Unlink()
	sibling.Prev = n.Prev
	if sibling.Prev != nil {
		sibling.Prev.Next = sibling
	}
	sibling.Next = n
	n.Prev = sibling
	sibling.Parent = n.Parent
	if sibling.Prev == nil {
		sibling.Parent.FirstChild = sibling
	}
}
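
AppendChild and InsertBefore are enough to assemble a small AST by hand, which can be handy when testing a renderer. A short illustrative sketch, not part of the vendored file (import path assumed to be the v2 module):

package main

import (
	"fmt"

	"github.com/russross/blackfriday/v2"
)

func main() {
	doc := blackfriday.NewNode(blackfriday.Document)

	para := blackfriday.NewNode(blackfriday.Paragraph)
	doc.AppendChild(para)

	txt := blackfriday.NewNode(blackfriday.Text)
	txt.Literal = []byte("hello world")
	para.AppendChild(txt)

	// Node.String prints the node type plus a snippet of its literal text.
	fmt.Println(doc, para, txt)
}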

func (n *Node) isContainer() bool {
	switch n.Type {
	case Document:
		fallthrough
	case BlockQuote:
		fallthrough
	case List:
		fallthrough
	case Item:
		fallthrough
	case Paragraph:
		fallthrough
	case Heading:
		fallthrough
	case Emph:
		fallthrough
	case Strong:
		fallthrough
	case Del:
		fallthrough
	case Link:
		fallthrough
	case Image:
		fallthrough
	case Table:
		fallthrough
	case TableHead:
		fallthrough
	case TableBody:
		fallthrough
	case TableRow:
		fallthrough
	case TableCell:
		return true
	default:
		return false
	}
}

func (n *Node) canContain(t NodeType) bool {
	if n.Type == List {
		return t == Item
	}
	if n.Type == Document || n.Type == BlockQuote || n.Type == Item {
		return t != Item
	}
	if n.Type == Table {
		return t == TableHead || t == TableBody
	}
	if n.Type == TableHead || n.Type == TableBody {
		return t == TableRow
	}
	if n.Type == TableRow {
		return t == TableCell
	}
	return false
}

// WalkStatus allows NodeVisitor to have some control over the tree traversal.
// It is returned from NodeVisitor and different values allow Node.Walk to
// decide which node to go to next.
type WalkStatus int

const (
	// GoToNext is the default traversal of every node.
	GoToNext WalkStatus = iota
	// SkipChildren tells walker to skip all children of current node.
	SkipChildren
	// Terminate tells walker to terminate the traversal.
	Terminate
)

// NodeVisitor is a callback to be called when traversing the syntax tree.
// Called twice for every node: once with entering=true when the branch is
// first visited, then with entering=false after all the children are done.
type NodeVisitor func(node *Node, entering bool) WalkStatus

// Walk is a convenience method that instantiates a walker and starts a
// traversal of subtree rooted at n.
func (n *Node) Walk(visitor NodeVisitor) {
	w := newNodeWalker(n)
	for w.current != nil {
		status := visitor(w.current, w.entering)
		switch status {
		case GoToNext:
			w.next()
		case SkipChildren:
			w.entering = false
			w.next()
		case Terminate:
			return
		}
	}
}

type nodeWalker struct {
	current  *Node
	root     *Node
	entering bool
}

func newNodeWalker(root *Node) *nodeWalker {
	return &nodeWalker{
		current:  root,
		root:     root,
		entering: true,
	}
}

func (nw *nodeWalker) next() {
	if (!nw.current.isContainer() || !nw.entering) && nw.current == nw.root {
		nw.current = nil
		return
	}
	if nw.entering && nw.current.isContainer() {
		if nw.current.FirstChild != nil {
			nw.current = nw.current.FirstChild
			nw.entering = true
		} else {
			nw.entering = false
		}
	} else if nw.current.Next == nil {
		nw.current = nw.current.Parent
		nw.entering = false
	} else {
		nw.current = nw.current.Next
		nw.entering = true
	}
}

func dump(ast *Node) {
	fmt.Println(dumpString(ast))
}

func dumpR(ast *Node, depth int) string {
	if ast == nil {
		return ""
	}
	indent := bytes.Repeat([]byte("\t"), depth)
	content := ast.Literal
	if content == nil {
		content = ast.content
	}
	result := fmt.Sprintf("%s%s(%q)\n", indent, ast.Type, content)
	for n := ast.FirstChild; n != nil; n = n.Next {
		result += dumpR(n, depth+1)
	}
	return result
}

func dumpString(ast *Node) string {
	return dumpR(ast, 0)
}
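
Walk plus a NodeVisitor is the usual way to post-process the parsed tree, for example to collect headings for a table of contents. A hedged sketch against the v2-style API shown above (import path and sample input are assumptions):

package main

import (
	"fmt"

	"github.com/russross/blackfriday/v2"
)

func main() {
	md := blackfriday.New(blackfriday.WithExtensions(blackfriday.CommonExtensions))
	ast := md.Parse([]byte("# Title\n\nsome text\n\n## Section\n"))

	ast.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
		// Each node is visited twice; act only on entry.
		if entering && node.Type == blackfriday.Heading {
			fmt.Printf("level %d heading\n", node.Level)
		}
		return blackfriday.GoToNext
	})
}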
@@ -0,0 +1,457 @@
//
// Blackfriday Markdown Processor
// Available at http://github.com/russross/blackfriday
//
// Copyright © 2011 Russ Ross <russ@russross.com>.
// Distributed under the Simplified BSD License.
// See README.md for details.
//

//
//
// SmartyPants rendering
//
//

package blackfriday

import (
	"bytes"
	"io"
)

// SPRenderer is a struct containing state of a Smartypants renderer.
type SPRenderer struct {
	inSingleQuote bool
	inDoubleQuote bool
	callbacks     [256]smartCallback
}

func wordBoundary(c byte) bool {
	return c == 0 || isspace(c) || ispunct(c)
}

func tolower(c byte) byte {
	if c >= 'A' && c <= 'Z' {
		return c - 'A' + 'a'
	}
	return c
}

func isdigit(c byte) bool {
	return c >= '0' && c <= '9'
}

func smartQuoteHelper(out *bytes.Buffer, previousChar byte, nextChar byte, quote byte, isOpen *bool, addNBSP bool) bool {
	// edge of the buffer is likely to be a tag that we don't get to see,
	// so we treat it like text sometimes

	// enumerate all sixteen possibilities for (previousChar, nextChar)
	// each can be one of {0, space, punct, other}
	switch {
	case previousChar == 0 && nextChar == 0:
		// context is not any help here, so toggle
		*isOpen = !*isOpen
	case isspace(previousChar) && nextChar == 0:
		// [ "] might be [ "<code>foo...]
		*isOpen = true
	case ispunct(previousChar) && nextChar == 0:
		// [!"] hmm... could be [Run!"] or [("<code>...]
		*isOpen = false
	case /* isnormal(previousChar) && */ nextChar == 0:
		// [a"] is probably a close
		*isOpen = false
	case previousChar == 0 && isspace(nextChar):
		// [" ] might be [...foo</code>" ]
		*isOpen = false
	case isspace(previousChar) && isspace(nextChar):
		// [ " ] context is not any help here, so toggle
		*isOpen = !*isOpen
	case ispunct(previousChar) && isspace(nextChar):
		// [!" ] is probably a close
		*isOpen = false
	case /* isnormal(previousChar) && */ isspace(nextChar):
		// [a" ] this is one of the easy cases
		*isOpen = false
	case previousChar == 0 && ispunct(nextChar):
		// ["!] hmm... could be ["$1.95] or [</code>"!...]
		*isOpen = false
	case isspace(previousChar) && ispunct(nextChar):
		// [ "!] looks more like [ "$1.95]
		*isOpen = true
	case ispunct(previousChar) && ispunct(nextChar):
		// [!"!] context is not any help here, so toggle
		*isOpen = !*isOpen
	case /* isnormal(previousChar) && */ ispunct(nextChar):
		// [a"!] is probably a close
		*isOpen = false
	case previousChar == 0 /* && isnormal(nextChar) */ :
		// ["a] is probably an open
		*isOpen = true
	case isspace(previousChar) /* && isnormal(nextChar) */ :
		// [ "a] this is one of the easy cases
		*isOpen = true
	case ispunct(previousChar) /* && isnormal(nextChar) */ :
		// [!"a] is probably an open
		*isOpen = true
	default:
		// [a'b] maybe a contraction?
		*isOpen = false
	}

	// Note that with the limited lookahead, this non-breaking
	// space will also be appended to single double quotes.
	if addNBSP && !*isOpen {
		out.WriteString("&nbsp;")
	}

	out.WriteByte('&')
	if *isOpen {
		out.WriteByte('l')
	} else {
		out.WriteByte('r')
	}
	out.WriteByte(quote)
	out.WriteString("quo;")

	if addNBSP && *isOpen {
		out.WriteString("&nbsp;")
	}

	return true
}

func (r *SPRenderer) smartSingleQuote(out *bytes.Buffer, previousChar byte, text []byte) int {
	if len(text) >= 2 {
		t1 := tolower(text[1])

		if t1 == '\'' {
			nextChar := byte(0)
			if len(text) >= 3 {
				nextChar = text[2]
			}
			if smartQuoteHelper(out, previousChar, nextChar, 'd', &r.inDoubleQuote, false) {
				return 1
			}
		}

		if (t1 == 's' || t1 == 't' || t1 == 'm' || t1 == 'd') && (len(text) < 3 || wordBoundary(text[2])) {
			out.WriteString("&rsquo;")
			return 0
		}

		if len(text) >= 3 {
			t2 := tolower(text[2])

			if ((t1 == 'r' && t2 == 'e') || (t1 == 'l' && t2 == 'l') || (t1 == 'v' && t2 == 'e')) &&
				(len(text) < 4 || wordBoundary(text[3])) {
				out.WriteString("&rsquo;")
				return 0
			}
		}
	}

	nextChar := byte(0)
	if len(text) > 1 {
		nextChar = text[1]
	}
	if smartQuoteHelper(out, previousChar, nextChar, 's', &r.inSingleQuote, false) {
		return 0
	}

	out.WriteByte(text[0])
	return 0
}

func (r *SPRenderer) smartParens(out *bytes.Buffer, previousChar byte, text []byte) int {
	if len(text) >= 3 {
		t1 := tolower(text[1])
		t2 := tolower(text[2])

		if t1 == 'c' && t2 == ')' {
			out.WriteString("&copy;")
			return 2
		}

		if t1 == 'r' && t2 == ')' {
			out.WriteString("&reg;")
			return 2
		}

		if len(text) >= 4 && t1 == 't' && t2 == 'm' && text[3] == ')' {
			out.WriteString("&trade;")
			return 3
		}
	}

	out.WriteByte(text[0])
	return 0
}

func (r *SPRenderer) smartDash(out *bytes.Buffer, previousChar byte, text []byte) int {
	if len(text) >= 2 {
		if text[1] == '-' {
			out.WriteString("&mdash;")
			return 1
		}

		if wordBoundary(previousChar) && wordBoundary(text[1]) {
			out.WriteString("&ndash;")
			return 0
		}
	}

	out.WriteByte(text[0])
	return 0
}

func (r *SPRenderer) smartDashLatex(out *bytes.Buffer, previousChar byte, text []byte) int {
	if len(text) >= 3 && text[1] == '-' && text[2] == '-' {
		out.WriteString("&mdash;")
		return 2
	}
	if len(text) >= 2 && text[1] == '-' {
		out.WriteString("&ndash;")
		return 1
	}

	out.WriteByte(text[0])
	return 0
}

func (r *SPRenderer) smartAmpVariant(out *bytes.Buffer, previousChar byte, text []byte, quote byte, addNBSP bool) int {
	if bytes.HasPrefix(text, []byte("&quot;")) {
		nextChar := byte(0)
		if len(text) >= 7 {
			nextChar = text[6]
		}
		if smartQuoteHelper(out, previousChar, nextChar, quote, &r.inDoubleQuote, addNBSP) {
			return 5
		}
	}

	if bytes.HasPrefix(text, []byte("&#0;")) {
		return 3
	}

	out.WriteByte('&')
	return 0
}

func (r *SPRenderer) smartAmp(angledQuotes, addNBSP bool) func(*bytes.Buffer, byte, []byte) int {
	var quote byte = 'd'
	if angledQuotes {
		quote = 'a'
	}

	return func(out *bytes.Buffer, previousChar byte, text []byte) int {
		return r.smartAmpVariant(out, previousChar, text, quote, addNBSP)
	}
}

func (r *SPRenderer) smartPeriod(out *bytes.Buffer, previousChar byte, text []byte) int {
	if len(text) >= 3 && text[1] == '.' && text[2] == '.' {
		out.WriteString("&hellip;")
		return 2
	}

	if len(text) >= 5 && text[1] == ' ' && text[2] == '.' && text[3] == ' ' && text[4] == '.' {
		out.WriteString("&hellip;")
		return 4
	}

	out.WriteByte(text[0])
	return 0
}

func (r *SPRenderer) smartBacktick(out *bytes.Buffer, previousChar byte, text []byte) int {
	if len(text) >= 2 && text[1] == '`' {
		nextChar := byte(0)
		if len(text) >= 3 {
			nextChar = text[2]
		}
		if smartQuoteHelper(out, previousChar, nextChar, 'd', &r.inDoubleQuote, false) {
			return 1
		}
	}

	out.WriteByte(text[0])
	return 0
}

func (r *SPRenderer) smartNumberGeneric(out *bytes.Buffer, previousChar byte, text []byte) int {
	if wordBoundary(previousChar) && previousChar != '/' && len(text) >= 3 {
		// is it of the form digits/digits(word boundary)?, i.e., \d+/\d+\b
		// note: check for regular slash (/) or fraction slash (⁄, 0x2044, or 0xe2 81 84 in utf-8)
		// and avoid changing dates like 1/23/2005 into fractions.
		numEnd := 0
		for len(text) > numEnd && isdigit(text[numEnd]) {
			numEnd++
		}
		if numEnd == 0 {
			out.WriteByte(text[0])
			return 0
		}
		denStart := numEnd + 1
		if len(text) > numEnd+3 && text[numEnd] == 0xe2 && text[numEnd+1] == 0x81 && text[numEnd+2] == 0x84 {
			denStart = numEnd + 3
		} else if len(text) < numEnd+2 || text[numEnd] != '/' {
			out.WriteByte(text[0])
			return 0
		}
		denEnd := denStart
		for len(text) > denEnd && isdigit(text[denEnd]) {
			denEnd++
		}
		if denEnd == denStart {
			out.WriteByte(text[0])
			return 0
		}
		if len(text) == denEnd || wordBoundary(text[denEnd]) && text[denEnd] != '/' {
			out.WriteString("<sup>")
			out.Write(text[:numEnd])
			out.WriteString("</sup>&frasl;<sub>")
			out.Write(text[denStart:denEnd])
			out.WriteString("</sub>")
			return denEnd - 1
		}
	}

	out.WriteByte(text[0])
	return 0
}

func (r *SPRenderer) smartNumber(out *bytes.Buffer, previousChar byte, text []byte) int {
	if wordBoundary(previousChar) && previousChar != '/' && len(text) >= 3 {
		if text[0] == '1' && text[1] == '/' && text[2] == '2' {
			if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' {
				out.WriteString("&frac12;")
				return 2
			}
		}

		if text[0] == '1' && text[1] == '/' && text[2] == '4' {
			if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' || (len(text) >= 5 && tolower(text[3]) == 't' && tolower(text[4]) == 'h') {
				out.WriteString("&frac14;")
				return 2
			}
		}

		if text[0] == '3' && text[1] == '/' && text[2] == '4' {
			if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' || (len(text) >= 6 && tolower(text[3]) == 't' && tolower(text[4]) == 'h' && tolower(text[5]) == 's') {
				out.WriteString("&frac34;")
				return 2
			}
		}
	}

	out.WriteByte(text[0])
	return 0
}

func (r *SPRenderer) smartDoubleQuoteVariant(out *bytes.Buffer, previousChar byte, text []byte, quote byte) int {
	nextChar := byte(0)
	if len(text) > 1 {
		nextChar = text[1]
	}
	if !smartQuoteHelper(out, previousChar, nextChar, quote, &r.inDoubleQuote, false) {
		out.WriteString("&quot;")
	}

	return 0
}

func (r *SPRenderer) smartDoubleQuote(out *bytes.Buffer, previousChar byte, text []byte) int {
	return r.smartDoubleQuoteVariant(out, previousChar, text, 'd')
}

func (r *SPRenderer) smartAngledDoubleQuote(out *bytes.Buffer, previousChar byte, text []byte) int {
	return r.smartDoubleQuoteVariant(out, previousChar, text, 'a')
}

func (r *SPRenderer) smartLeftAngle(out *bytes.Buffer, previousChar byte, text []byte) int {
	i := 0

	for i < len(text) && text[i] != '>' {
		i++
	}

	out.Write(text[:i+1])
	return i
}

type smartCallback func(out *bytes.Buffer, previousChar byte, text []byte) int

// NewSmartypantsRenderer constructs a Smartypants renderer object.
func NewSmartypantsRenderer(flags HTMLFlags) *SPRenderer {
	var (
		r SPRenderer

		smartAmpAngled      = r.smartAmp(true, false)
		smartAmpAngledNBSP  = r.smartAmp(true, true)
		smartAmpRegular     = r.smartAmp(false, false)
		smartAmpRegularNBSP = r.smartAmp(false, true)

		addNBSP = flags&SmartypantsQuotesNBSP != 0
	)

	if flags&SmartypantsAngledQuotes == 0 {
		r.callbacks['"'] = r.smartDoubleQuote
		if !addNBSP {
			r.callbacks['&'] = smartAmpRegular
		} else {
			r.callbacks['&'] = smartAmpRegularNBSP
		}
	} else {
		r.callbacks['"'] = r.smartAngledDoubleQuote
		if !addNBSP {
			r.callbacks['&'] = smartAmpAngled
		} else {
			r.callbacks['&'] = smartAmpAngledNBSP
		}
	}
	r.callbacks['\''] = r.smartSingleQuote
	r.callbacks['('] = r.smartParens
	if flags&SmartypantsDashes != 0 {
		if flags&SmartypantsLatexDashes == 0 {
			r.callbacks['-'] = r.smartDash
		} else {
			r.callbacks['-'] = r.smartDashLatex
		}
	}
	r.callbacks['.'] = r.smartPeriod
	if flags&SmartypantsFractions == 0 {
		r.callbacks['1'] = r.smartNumber
		r.callbacks['3'] = r.smartNumber
	} else {
		for ch := '1'; ch <= '9'; ch++ {
			r.callbacks[ch] = r.smartNumberGeneric
		}
	}
	r.callbacks['<'] = r.smartLeftAngle
	r.callbacks['`'] = r.smartBacktick
	return &r
}

// Process is the entry point of the Smartypants renderer.
func (r *SPRenderer) Process(w io.Writer, text []byte) {
	mark := 0
	for i := 0; i < len(text); i++ {
		if action := r.callbacks[text[i]]; action != nil {
			if i > mark {
				w.Write(text[mark:i])
			}
			previousChar := byte(0)
			if i > 0 {
				previousChar = text[i-1]
			}
			var tmp bytes.Buffer
			i += action(&tmp, previousChar, text[i:])
			w.Write(tmp.Bytes())
			mark = i + 1
		}
	}
	if mark < len(text) {
		w.Write(text[mark:])
	}
}
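
The SPRenderer can also be driven directly, outside the HTML renderer, to smarten a plain snippet. An illustrative sketch (flag choice arbitrary; import path assumed to be the v2 module):

package main

import (
	"bytes"
	"fmt"

	"github.com/russross/blackfriday/v2"
)

func main() {
	sp := blackfriday.NewSmartypantsRenderer(
		blackfriday.SmartypantsDashes | blackfriday.SmartypantsFractions)

	var out bytes.Buffer
	// Quotes, dashes, fractions and ellipses are rewritten to HTML entities.
	sp.Process(&out, []byte(`about 1/2 done -- "almost" there...`))
	fmt.Println(out.String())
}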
@@ -0,0 +1,16 @@
sudo: false
language: go
go:
  - 1.x
  - master
matrix:
  allow_failures:
    - go: master
  fast_finish: true
install:
  - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step).
script:
  - go get -t -v ./...
  - diff -u <(echo -n) <(gofmt -d -s .)
  - go tool vet .
  - go test -v -race ./...
@@ -0,0 +1,9 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = ["main.go"],
    importmap = "k8s.io/kops/vendor/github.com/shurcooL/sanitized_anchor_name",
    importpath = "github.com/shurcooL/sanitized_anchor_name",
    visibility = ["//visibility:public"],
)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2015 Dmitri Shuralyov

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -0,0 +1,36 @@
sanitized_anchor_name
=====================

[](https://travis-ci.org/shurcooL/sanitized_anchor_name) [](https://godoc.org/github.com/shurcooL/sanitized_anchor_name)

Package sanitized_anchor_name provides a func to create sanitized anchor names.

Its logic can be reused by multiple packages to create interoperable anchor names
and links to those anchors.

At this time, it does not try to ensure that generated anchor names
are unique, that responsibility falls on the caller.

Installation
------------

```bash
go get -u github.com/shurcooL/sanitized_anchor_name
```

Example
-------

```Go
anchorName := sanitized_anchor_name.Create("This is a header")

fmt.Println(anchorName)

// Output:
// this-is-a-header
```

License
-------

- [MIT License](LICENSE)
@@ -0,0 +1 @@
module github.com/shurcooL/sanitized_anchor_name
@@ -0,0 +1,29 @@
// Package sanitized_anchor_name provides a func to create sanitized anchor names.
//
// Its logic can be reused by multiple packages to create interoperable anchor names
// and links to those anchors.
//
// At this time, it does not try to ensure that generated anchor names
// are unique, that responsibility falls on the caller.
package sanitized_anchor_name // import "github.com/shurcooL/sanitized_anchor_name"

import "unicode"

// Create returns a sanitized anchor name for the given text.
func Create(text string) string {
	var anchorName []rune
	var futureDash = false
	for _, r := range text {
		switch {
		case unicode.IsLetter(r) || unicode.IsNumber(r):
			if futureDash && len(anchorName) > 0 {
				anchorName = append(anchorName, '-')
			}
			futureDash = false
			anchorName = append(anchorName, unicode.ToLower(r))
		default:
			futureDash = true
		}
	}
	return string(anchorName)
}
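
For reference, Create on a slightly messier heading than the README example (the heading string is made up; the result follows the lowercasing and dash-collapsing logic above):

package main

import (
	"fmt"

	"github.com/shurcooL/sanitized_anchor_name"
)

func main() {
	// Runs of non-letter/non-number characters collapse into single dashes,
	// letters are lowercased, and no leading or trailing dash is emitted.
	fmt.Println(sanitized_anchor_name.Create("Go 1.14 — What's New?"))
	// prints: go-1-14-what-s-new
}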
@@ -32,7 +32,8 @@ Session.vim
 tags

 *.exe

 cobra.test
+bin

-.idea/*
+.idea/
+*.iml
@@ -3,29 +3,27 @@ language: go
 stages:
   - diff
   - test
+  - build

 go:
-  - 1.10.x
-  - 1.11.x
   - 1.12.x
+  - 1.13.x
   - tip

+before_install:
+  - go get -u github.com/kyoh86/richgo
+  - go get -u github.com/mitchellh/gox
+
 matrix:
   allow_failures:
     - go: tip
   include:
     - stage: diff
-      go: 1.12.x
+      go: 1.13.x
-      script: diff -u <(echo -n) <(gofmt -d -s .)
+      script: make fmt
+    - stage: build
+      go: 1.13.x
+      script: make cobra_generator

-before_install:
-  - mkdir -p bin
-  - curl -Lso bin/shellcheck https://github.com/caarlos0/shellcheck-docker/releases/download/v0.6.0/shellcheck
-  - chmod +x bin/shellcheck
-  - go get -u github.com/kyoh86/richgo
 script:
-  - PATH=$PATH:$PWD/bin richgo test -v ./...
+  - make test
-  - go build
-  - if [ -z $NOVET ]; then
-      diff -u <(echo -n) <(go vet . 2>&1 | grep -vE 'ExampleCommand|bash_completions.*Fprint');
-    fi
@@ -0,0 +1,36 @@
BIN="./bin"
SRC=$(shell find . -name "*.go")

ifeq (, $(shell which richgo))
$(warning "could not find richgo in $(PATH), run: go get github.com/kyoh86/richgo")
endif

.PHONY: fmt vet test cobra_generator install_deps clean

default: all

all: fmt vet test cobra_generator

fmt:
	$(info ******************** checking formatting ********************)
	@test -z $(shell gofmt -l $(SRC)) || (gofmt -d $(SRC); exit 1)

test: install_deps vet
	$(info ******************** running tests ********************)
	richgo test -v ./...

cobra_generator: install_deps
	$(info ******************** building generator ********************)
	mkdir -p $(BIN)
	make -C cobra all

install_deps:
	$(info ******************** downloading dependencies ********************)
	go get -v ./...

vet:
	$(info ******************** vetting ********************)
	go vet ./...

clean:
	rm -rf $(BIN)
@@ -24,11 +24,13 @@ Many of the most widely used Go projects are built using Cobra, such as:
 [Prototool](https://github.com/uber/prototool),
 [mattermost-server](https://github.com/mattermost/mattermost-server),
 [Gardener](https://github.com/gardener/gardenctl),
+[Linkerd](https://linkerd.io/),
 etc.

 [](https://travis-ci.org/spf13/cobra)
 [](https://circleci.com/gh/spf13/cobra)
 [](https://godoc.org/github.com/spf13/cobra)
+[](https://goreportcard.com/report/github.com/spf13/cobra)

 # Table of Contents
@@ -208,51 +210,78 @@ You will additionally define flags and handle configuration in your init() funct
 For example cmd/root.go:

 ```go
-import (
-	"fmt"
-	"os"
+package cmd

-	homedir "github.com/mitchellh/go-homedir"
-	"github.com/spf13/cobra"
-	"github.com/spf13/viper"
+import (
+	"fmt"
+	"os"
+
+	homedir "github.com/mitchellh/go-homedir"
+	"github.com/spf13/cobra"
+	"github.com/spf13/viper"
 )

+var (
+	// Used for flags.
+	cfgFile     string
+	userLicense string
+
+	rootCmd = &cobra.Command{
+		Use:   "cobra",
+		Short: "A generator for Cobra based Applications",
+		Long: `Cobra is a CLI library for Go that empowers applications.
+This application is a tool to generate the needed files
+to quickly create a Cobra application.`,
+	}
+)
+
+// Execute executes the root command.
+func Execute() error {
+	return rootCmd.Execute()
+}
+
 func init() {
 	cobra.OnInitialize(initConfig)
 	rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)")
-	rootCmd.PersistentFlags().StringVarP(&projectBase, "projectbase", "b", "", "base project directory eg. github.com/spf13/")
-	rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "Author name for copyright attribution")
+	rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "author name for copyright attribution")
-	rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "Name of license for the project (can provide `licensetext` in config)")
+	rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "name of license for the project")
-	rootCmd.PersistentFlags().Bool("viper", true, "Use Viper for configuration")
+	rootCmd.PersistentFlags().Bool("viper", true, "use Viper for configuration")
 	viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author"))
-	viper.BindPFlag("projectbase", rootCmd.PersistentFlags().Lookup("projectbase"))
 	viper.BindPFlag("useViper", rootCmd.PersistentFlags().Lookup("viper"))
 	viper.SetDefault("author", "NAME HERE <EMAIL ADDRESS>")
 	viper.SetDefault("license", "apache")
+
+	rootCmd.AddCommand(addCmd)
+	rootCmd.AddCommand(initCmd)
+}
+
+func er(msg interface{}) {
+	fmt.Println("Error:", msg)
+	os.Exit(1)
 }

 func initConfig() {
-	// Don't forget to read config either from cfgFile or from home directory!
 	if cfgFile != "" {
 		// Use config file from the flag.
 		viper.SetConfigFile(cfgFile)
 	} else {
 		// Find home directory.
 		home, err := homedir.Dir()
 		if err != nil {
-			fmt.Println(err)
-			os.Exit(1)
+			er(err)
 		}

 		// Search config in home directory with name ".cobra" (without extension).
 		viper.AddConfigPath(home)
 		viper.SetConfigName(".cobra")
 	}

-	if err := viper.ReadInConfig(); err != nil {
-		fmt.Println("Can't read config:", err)
-		os.Exit(1)
+	viper.AutomaticEnv()
+
+	if err := viper.ReadInConfig(); err == nil {
+		fmt.Println("Using config file:", viper.ConfigFileUsed())
 	}
 }
 ```
@@ -459,7 +488,7 @@ For many years people have printed back to the screen.
 Echo works a lot like print, except it has a child command.`,
 	Args: cobra.MinimumNArgs(1),
 	Run: func(cmd *cobra.Command, args []string) {
-		fmt.Println("Print: " + strings.Join(args, " "))
+		fmt.Println("Echo: " + strings.Join(args, " "))
 	},
 }
@@ -61,6 +61,7 @@ __%[1]s_contains_word()
 __%[1]s_handle_reply()
 {
     __%[1]s_debug "${FUNCNAME[0]}"
+    local comp
     case $cur in
     -*)
         if [[ $(type -t compopt) = "builtin" ]]; then

@@ -72,7 +73,9 @@ __%[1]s_handle_reply()
         else
             allflags=("${flags[*]} ${two_word_flags[*]}")
         fi
-        COMPREPLY=( $(compgen -W "${allflags[*]}" -- "$cur") )
+        while IFS='' read -r comp; do
+            COMPREPLY+=("$comp")
+        done < <(compgen -W "${allflags[*]}" -- "$cur")
         if [[ $(type -t compopt) = "builtin" ]]; then
             [[ "${COMPREPLY[0]}" == *= ]] || compopt +o nospace
         fi

@@ -122,10 +125,14 @@ __%[1]s_handle_reply()
     if [[ ${#must_have_one_flag[@]} -ne 0 ]]; then
         completions+=("${must_have_one_flag[@]}")
     fi
-    COMPREPLY=( $(compgen -W "${completions[*]}" -- "$cur") )
+    while IFS='' read -r comp; do
+        COMPREPLY+=("$comp")
+    done < <(compgen -W "${completions[*]}" -- "$cur")

     if [[ ${#COMPREPLY[@]} -eq 0 && ${#noun_aliases[@]} -gt 0 && ${#must_have_one_noun[@]} -ne 0 ]]; then
-        COMPREPLY=( $(compgen -W "${noun_aliases[*]}" -- "$cur") )
+        while IFS='' read -r comp; do
+            COMPREPLY+=("$comp")
+        done < <(compgen -W "${noun_aliases[*]}" -- "$cur")
     fi

     if [[ ${#COMPREPLY[@]} -eq 0 ]]; then

@@ -160,7 +167,7 @@ __%[1]s_handle_filename_extension_flag()
 __%[1]s_handle_subdirs_in_dir_flag()
 {
     local dir="$1"
-    pushd "${dir}" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1
+    pushd "${dir}" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 || return
 }

 __%[1]s_handle_flag()
@@ -52,7 +52,7 @@ var EnableCommandSorting = true
 // if the CLI is started from explorer.exe.
 // To disable the mousetrap, just set this variable to blank string ("").
 // Works only on Microsoft Windows.
-var MousetrapHelpText string = `This is a command line tool.
+var MousetrapHelpText = `This is a command line tool.

 You need to open cmd.exe and run it from there.
 `

@@ -61,7 +61,7 @@ You need to open cmd.exe and run it from there.
 // if the CLI is started from explorer.exe. Set to 0 to wait for the return key to be pressed.
 // To disable the mousetrap, just set MousetrapHelpText to blank string ("").
 // Works only on Microsoft Windows.
-var MousetrapDisplayDuration time.Duration = 5 * time.Second
+var MousetrapDisplayDuration = 5 * time.Second

 // AddTemplateFunc adds a template function that's available to Usage and Help
 // template generation.
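
As the comments above note, both knobs are plain package variables, so a CLI that should be double-clickable on Windows can simply clear the help text or shorten the delay. A small illustrative sketch (the root command itself is omitted):

package main

import (
	"time"

	"github.com/spf13/cobra"
)

func main() {
	// Disable the explorer.exe "mousetrap" message entirely (Windows only),
	// or alternatively shorten how long it stays on screen.
	cobra.MousetrapHelpText = ""
	cobra.MousetrapDisplayDuration = 2 * time.Second

	// ... define and Execute() the root command as usual.
}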
@@ -17,6 +17,7 @@ package cobra

 import (
 	"bytes"
+	"context"
 	"fmt"
 	"io"
 	"os"

@@ -80,7 +81,8 @@ type Command struct {

 	// Version defines the version for this command. If this value is non-empty and the command does not
 	// define a "version" flag, a "version" boolean flag will be added to the command and, if specified,
-	// will print content of the "Version" variable.
+	// will print content of the "Version" variable. A shorthand "v" flag will also be added if the
+	// command does not define one.
 	Version string

 	// The *Run functions are executed in the following order:

@@ -140,9 +142,11 @@ type Command struct {
 	// TraverseChildren parses flags on all parents before executing child command.
 	TraverseChildren bool

-	//FParseErrWhitelist flag parse errors to be ignored
+	// FParseErrWhitelist flag parse errors to be ignored
 	FParseErrWhitelist FParseErrWhitelist

+	ctx context.Context
+
 	// commands is the list of commands supported by this program.
 	commands []*Command
 	// parent is a parent command for this command.

@@ -202,6 +206,12 @@ type Command struct {
 	errWriter io.Writer
 }

+// Context returns underlying command context. If command wasn't
+// executed with ExecuteContext Context returns Background context.
+func (c *Command) Context() context.Context {
+	return c.ctx
+}
+
 // SetArgs sets arguments for the command. It is set to os.Args[1:] by default, if desired, can be overridden
 // particularly useful when testing.
 func (c *Command) SetArgs(a []string) {

@@ -228,7 +238,7 @@ func (c *Command) SetErr(newErr io.Writer) {
 	c.errWriter = newErr
 }

-// SetOut sets the source for input data
+// SetIn sets the source for input data
 // If newIn is nil, os.Stdin is used.
 func (c *Command) SetIn(newIn io.Reader) {
 	c.inReader = newIn

@@ -297,7 +307,7 @@ func (c *Command) ErrOrStderr() io.Writer {
 	return c.getErr(os.Stderr)
 }

-// ErrOrStderr returns output to stderr
+// InOrStdin returns input to stdin
 func (c *Command) InOrStdin() io.Reader {
 	return c.getIn(os.Stdin)
 }

@@ -369,6 +379,8 @@ func (c *Command) HelpFunc() func(*Command, []string) {
 	}
 	return func(c *Command, a []string) {
 		c.mergePersistentFlags()
+		// The help should be sent to stdout
+		// See https://github.com/spf13/cobra/issues/1002
 		err := tmpl(c.OutOrStdout(), c.HelpTemplate(), c)
 		if err != nil {
 			c.Println(err)

@@ -857,6 +869,13 @@ func (c *Command) preRun() {
 	}
 }

+// ExecuteContext is the same as Execute(), but sets the ctx on the command.
+// Retrieve ctx by calling cmd.Context() inside your *Run lifecycle functions.
+func (c *Command) ExecuteContext(ctx context.Context) error {
+	c.ctx = ctx
+	return c.Execute()
+}
+
 // Execute uses the args (os.Args[1:] by default)
 // and run through the command tree finding appropriate matches
 // for commands and then corresponding flags.
@ -867,6 +886,10 @@ func (c *Command) Execute() error {
|
||||||
|
|
||||||
// ExecuteC executes the command.
|
// ExecuteC executes the command.
|
||||||
func (c *Command) ExecuteC() (cmd *Command, err error) {
|
func (c *Command) ExecuteC() (cmd *Command, err error) {
|
||||||
|
if c.ctx == nil {
|
||||||
|
c.ctx = context.Background()
|
||||||
|
}
|
||||||
|
|
||||||
// Regardless of what command execute is called on, run on Root only
|
// Regardless of what command execute is called on, run on Root only
|
||||||
if c.HasParent() {
|
if c.HasParent() {
|
||||||
return c.Root().ExecuteC()
|
return c.Root().ExecuteC()
|
||||||
|
|
@ -911,6 +934,12 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
|
||||||
cmd.commandCalledAs.name = cmd.Name()
|
cmd.commandCalledAs.name = cmd.Name()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// We have to pass global context to children command
|
||||||
|
// if context is present on the parent command.
|
||||||
|
if cmd.ctx == nil {
|
||||||
|
cmd.ctx = c.ctx
|
||||||
|
}
|
||||||
|
|
||||||
err = cmd.execute(flags)
|
err = cmd.execute(flags)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// Always show help if requested, even if SilenceErrors is in
|
// Always show help if requested, even if SilenceErrors is in
|
||||||
|
|
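The command.go hunks above vendor cobra's new context plumbing (`ExecuteContext` and `Command.Context()`). A minimal sketch of how calling code would use it, assuming a made-up root command and timeout (illustrative only, not part of this commit):

```go
// Illustrative only; not part of this commit.
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/spf13/cobra"
)

func main() {
	rootCmd := &cobra.Command{
		Use: "demo",
		RunE: func(cmd *cobra.Command, args []string) error {
			// cmd.Context() returns the ctx passed to ExecuteContext,
			// or context.Background() if plain Execute was used.
			select {
			case <-cmd.Context().Done():
				return cmd.Context().Err()
			case <-time.After(100 * time.Millisecond):
				fmt.Println("work finished")
				return nil
			}
		},
	}

	// ExecuteContext threads ctx through to every *Run function.
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	if err := rootCmd.ExecuteContext(ctx); err != nil {
		fmt.Println("error:", err)
	}
}
```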
@@ -994,7 +1023,11 @@ func (c *Command) InitDefaultVersionFlag() {
 } else {
 usage += c.Name()
 }
-c.Flags().Bool("version", false, usage)
+if c.Flags().ShorthandLookup("v") == nil {
+c.Flags().BoolP("version", "v", false, usage)
+} else {
+c.Flags().Bool("version", false, usage)
+}
 }
 }

@@ -1547,7 +1580,7 @@ func (c *Command) ParseFlags(args []string) error {
 beforeErrorBufLen := c.flagErrorBuf.Len()
 c.mergePersistentFlags()

-//do it here after merging all flags and just before parse
+// do it here after merging all flags and just before parse
 c.Flags().ParseErrorsWhitelist = flag.ParseErrorsWhitelist(c.FParseErrWhitelist)

 err := c.Flags().Parse(args)

@@ -13,7 +13,7 @@ go_library(
 importpath = "github.com/spf13/cobra/doc",
 visibility = ["//visibility:public"],
 deps = [
-"//vendor/github.com/cpuguy83/go-md2man/md2man:go_default_library",
+"//vendor/github.com/cpuguy83/go-md2man/v2/md2man:go_default_library",
 "//vendor/github.com/spf13/cobra:go_default_library",
 "//vendor/github.com/spf13/pflag:go_default_library",
 "//vendor/gopkg.in/yaml.v2:go_default_library",

@@ -24,7 +24,7 @@ import (
 "strings"
 "time"

-"github.com/cpuguy83/go-md2man/md2man"
+"github.com/cpuguy83/go-md2man/v2/md2man"
 "github.com/spf13/cobra"
 "github.com/spf13/pflag"
 )

@@ -1,6 +1,6 @@
 # Generating Markdown Docs For Your Own cobra.Command

-Generating man pages from a cobra command is incredibly easy. An example is as follows:
+Generating Markdown pages from a cobra command is incredibly easy. An example is as follows:

 ```go
 package main

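The README hunk above only shows the opening of the upstream example. For orientation, a minimal sketch of generating Markdown docs with the vendored `cobra/doc` package (illustrative only; the command and output directory are made up):

```go
// Illustrative only; not part of this commit.
package main

import (
	"log"

	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

func main() {
	cmd := &cobra.Command{
		Use:   "demo",
		Short: "A hypothetical command used only for doc generation.",
	}
	// GenMarkdownTree writes one Markdown file per command under ./docs.
	if err := doc.GenMarkdownTree(cmd, "./docs"); err != nil {
		log.Fatal(err)
	}
}
```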
@@ -3,11 +3,10 @@ module github.com/spf13/cobra
 go 1.12

 require (
-github.com/BurntSushi/toml v0.3.1 // indirect
-github.com/cpuguy83/go-md2man v1.0.10
+github.com/cpuguy83/go-md2man/v2 v2.0.0
 github.com/inconshreveable/mousetrap v1.0.0
 github.com/mitchellh/go-homedir v1.1.0
 github.com/spf13/pflag v1.0.3
-github.com/spf13/viper v1.3.2
+github.com/spf13/viper v1.4.0
 gopkg.in/yaml.v2 v2.2.2
 )

@@ -1,31 +1,91 @@
+cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
+github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
+github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
+github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
+github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
+github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
+github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
 github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
-github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
 github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
-github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
-github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
+github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
+github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
+github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=
+github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
 github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
+github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
+github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
+github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
+github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
+github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
+github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
+github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
+github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
 github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
 github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
 github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
 github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
+github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
+github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
+github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
+github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
+github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
+github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY=
 github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
+github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
 github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
 github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
 github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
 github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
+github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
+github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
 github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
 github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
+github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
-github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
+github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
+github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso=
+github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
+github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
+github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
+github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
+github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
+github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
+github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
+github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
+github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
+github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
+github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
+github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
+github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
 github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI=
 github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
 github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=

@@ -34,18 +94,56 @@ github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9
 github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
 github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
 github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
-github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M=
-github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
+github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
+github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
+github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
-github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
+github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
+github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
+github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
 github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
-golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
-golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a h1:1n5lsVfiQW3yfsRGu98756EH1YthsFqr/5mxHduZW2A=
-golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
+go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
+go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
+go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
+golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
+google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
+google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
+gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
+gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
+gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

@@ -1,20 +0,0 @@
-sudo: false
-language: go
-go:
-- 1.6.x
-- 1.7.x
-- 1.8.x
-- 1.9.x
-- 1.10.x
-- 1.11.x
-- 1.12.x
-- 1.13.x
-
-install: skip
-script:
-- go get golang.org/x/tools/cmd/cover
-- go get github.com/smartystreets/goconvey
-- mkdir -p $HOME/gopath/src/gopkg.in
-- ln -s $HOME/gopath/src/github.com/go-ini/ini $HOME/gopath/src/gopkg.in/ini.v1
-- cd $HOME/gopath/src/gopkg.in/ini.v1
-- go test -v -cover -race

@@ -6,7 +6,7 @@ test:
 go test -v -cover -race

 bench:
-go test -v -cover -race -test.bench=. -test.benchmem
+go test -v -cover -test.bench=. -test.benchmem

 vet:
 go vet

@@ -1,6 +1,9 @@
 # INI

-[![Build Status](https://img.shields.io/travis/go-ini/ini/master.svg?style=for-the-badge&logo=travis)](https://travis-ci.org/go-ini/ini) [![Sourcegraph](https://img.shields.io/badge/view%20on-Sourcegraph-brightgreen.svg?style=for-the-badge&logo=sourcegraph)](https://sourcegraph.com/github.com/go-ini/ini)
+[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/go-ini/ini/Go?logo=github&style=for-the-badge)](https://github.com/go-ini/ini/actions?query=workflow%3AGo)
+[![codecov](https://img.shields.io/codecov/c/github/go-ini/ini/master?logo=codecov&style=for-the-badge)](https://codecov.io/gh/go-ini/ini)
+[![GoDoc](https://img.shields.io/badge/GoDoc-Reference-blue?style=for-the-badge&logo=go)](https://pkg.go.dev/github.com/go-ini/ini?tab=doc)
+[![Sourcegraph](https://img.shields.io/badge/view%20on-Sourcegraph-brightgreen?style=for-the-badge&logo=sourcegraph)](https://sourcegraph.com/github.com/go-ini/ini)

 ![](https://avatars0.githubusercontent.com/u/10216035?v=3&s=200 "Logo")

@@ -8,7 +11,7 @@ Package ini provides INI file read and write functionality in Go.

 ## Features

-- Load from multiple data sources(`[]byte`, file and `io.ReadCloser`) with overwrites.
+- Load from multiple data sources(file, `[]byte`, `io.Reader` and `io.ReadCloser`) with overwrites.
 - Read with recursion values.
 - Read with parent-child sections.
 - Read with auto-increment key names.

@@ -33,6 +36,7 @@ Please add `-u` flag to update in the future.

 - [Getting Started](https://ini.unknwon.io/docs/intro/getting_started)
 - [API Documentation](https://gowalker.org/gopkg.in/ini.v1)
+- 中国大陆镜像：https://ini.unknwon.cn

 ## License

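The features list now mentions `io.Reader` as a data source (wired up in the `parseDataSource` hunk further below). A minimal sketch of loading from a reader, assuming the upstream `gopkg.in/ini.v1` import path and made-up INI content (illustrative only, not part of this commit):

```go
// Illustrative only; not part of this commit.
package main

import (
	"fmt"
	"log"
	"strings"

	"gopkg.in/ini.v1"
)

func main() {
	// Any io.Reader now works as a data source, not just files and []byte.
	r := strings.NewReader("[server]\nhost = example.local\nport = 8080\n")
	cfg, err := ini.Load(r)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(cfg.Section("server").Key("host").String()) // example.local
}
```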
@@ -0,0 +1,9 @@
+coverage:
+  range: "60...95"
+  status:
+    project:
+      default:
+        threshold: 1%
+
+comment:
+  layout: 'diff, files'

@@ -68,6 +68,8 @@ func parseDataSource(source interface{}) (dataSource, error) {
 return &sourceData{s}, nil
 case io.ReadCloser:
 return &sourceReadCloser{s}, nil
+case io.Reader:
+return &sourceReadCloser{ioutil.NopCloser(s)}, nil
 default:
 return nil, fmt.Errorf("error parsing data source: unknown type %q", s)
 }

@@ -25,7 +25,7 @@ import (
 "sync"
 )

-// File represents a combination of a or more INI file(s) in memory.
+// File represents a combination of one or more INI files in memory.
 type File struct {
 options LoadOptions
 dataSources []dataSource

@@ -36,8 +36,12 @@ type File struct {

 // To keep data in order.
 sectionList []string
+// To keep track of the index of a section with same name.
+// This meta list is only used with non-unique section names are allowed.
+sectionIndexes []int

 // Actual data is stored here.
-sections map[string]*Section
+sections map[string][]*Section

 NameMapper
 ValueMapper

@@ -48,27 +52,37 @@ func newFile(dataSources []dataSource, opts LoadOptions) *File {
 if len(opts.KeyValueDelimiters) == 0 {
 opts.KeyValueDelimiters = "=:"
 }
+if len(opts.KeyValueDelimiterOnWrite) == 0 {
+opts.KeyValueDelimiterOnWrite = "="
+}
+
 return &File{
 BlockMode: true,
 dataSources: dataSources,
-sections: make(map[string]*Section),
-sectionList: make([]string, 0, 10),
+sections: make(map[string][]*Section),
 options: opts,
 }
 }

 // Empty returns an empty file object.
-func Empty() *File {
-// Ignore error here, we sure our data is good.
-f, _ := Load([]byte(""))
+func Empty(opts ...LoadOptions) *File {
+var opt LoadOptions
+if len(opts) > 0 {
+opt = opts[0]
+}
+
+// Ignore error here, we are sure our data is good.
+f, _ := LoadSources(opt, []byte(""))
 return f
 }

 // NewSection creates a new section.
 func (f *File) NewSection(name string) (*Section, error) {
 if len(name) == 0 {
-return nil, errors.New("error creating new section: empty section name")
-} else if f.options.Insensitive && name != DefaultSection {
+return nil, errors.New("empty section name")
+}
+
+if f.options.Insensitive && name != DefaultSection {
 name = strings.ToLower(name)
 }

@@ -77,13 +91,20 @@ func (f *File) NewSection(name string) (*Section, error) {
 defer f.lock.Unlock()
 }

-if inSlice(name, f.sectionList) {
-return f.sections[name], nil
+if !f.options.AllowNonUniqueSections && inSlice(name, f.sectionList) {
+return f.sections[name][0], nil
 }

 f.sectionList = append(f.sectionList, name)
-f.sections[name] = newSection(f, name)
-return f.sections[name], nil
+
+// NOTE: Append to indexes must happen before appending to sections,
+// otherwise index will have off-by-one problem.
+f.sectionIndexes = append(f.sectionIndexes, len(f.sections[name]))
+
+sec := newSection(f, name)
+f.sections[name] = append(f.sections[name], sec)
+
+return sec, nil
 }

 // NewRawSection creates a new section with an unparseable body.

@@ -110,6 +131,16 @@ func (f *File) NewSections(names ...string) (err error) {

 // GetSection returns section by given name.
 func (f *File) GetSection(name string) (*Section, error) {
+secs, err := f.SectionsByName(name)
+if err != nil {
+return nil, err
+}
+
+return secs[0], err
+}
+
+// SectionsByName returns all sections with given name.
+func (f *File) SectionsByName(name string) ([]*Section, error) {
 if len(name) == 0 {
 name = DefaultSection
 }

@@ -122,11 +153,12 @@ func (f *File) GetSection(name string) (*Section, error) {
 defer f.lock.RUnlock()
 }

-sec := f.sections[name]
-if sec == nil {
-return nil, fmt.Errorf("section '%s' does not exist", name)
+secs := f.sections[name]
+if len(secs) == 0 {
+return nil, fmt.Errorf("section %q does not exist", name)
 }
-return sec, nil
+
+return secs, nil
 }

 // Section assumes named section exists and returns a zero-value when not.

@@ -141,6 +173,19 @@ func (f *File) Section(name string) *Section {
 return sec
 }

+// SectionWithIndex assumes named section exists and returns a new section when not.
+func (f *File) SectionWithIndex(name string, index int) *Section {
+secs, err := f.SectionsByName(name)
+if err != nil || len(secs) <= index {
+// NOTE: It's OK here because the only possible error is empty section name,
+// but if it's empty, this piece of code won't be executed.
+newSec, _ := f.NewSection(name)
+return newSec
+}
+
+return secs[index]
+}
+
 // Sections returns a list of Section stored in the current instance.
 func (f *File) Sections() []*Section {
 if f.BlockMode {

@@ -150,7 +195,7 @@ func (f *File) Sections() []*Section {

 sections := make([]*Section, len(f.sectionList))
 for i, name := range f.sectionList {
-sections[i] = f.sections[name]
+sections[i] = f.sections[name][f.sectionIndexes[i]]
 }
 return sections
 }

@@ -167,24 +212,70 @@ func (f *File) SectionStrings() []string {
 return list
 }

-// DeleteSection deletes a section.
+// DeleteSection deletes a section or all sections with given name.
 func (f *File) DeleteSection(name string) {
-if f.BlockMode {
-f.lock.Lock()
-defer f.lock.Unlock()
+secs, err := f.SectionsByName(name)
+if err != nil {
+return
+}
+
+for i := 0; i < len(secs); i++ {
+// For non-unique sections, it is always needed to remove the first one so
+// in the next iteration, the subsequent section continue having index 0.
+// Ignoring the error as index 0 never returns an error.
+_ = f.DeleteSectionWithIndex(name, 0)
+}
+}
+
+// DeleteSectionWithIndex deletes a section with given name and index.
+func (f *File) DeleteSectionWithIndex(name string, index int) error {
+if !f.options.AllowNonUniqueSections && index != 0 {
+return fmt.Errorf("delete section with non-zero index is only allowed when non-unique sections is enabled")
 }

 if len(name) == 0 {
 name = DefaultSection
 }
-
-for i, s := range f.sectionList {
-if s == name {
-f.sectionList = append(f.sectionList[:i], f.sectionList[i+1:]...)
-delete(f.sections, name)
-return
-}
+if f.options.Insensitive {
+name = strings.ToLower(name)
 }

+if f.BlockMode {
+f.lock.Lock()
+defer f.lock.Unlock()
+}
+
+// Count occurrences of the sections
+occurrences := 0
+
+sectionListCopy := make([]string, len(f.sectionList))
+copy(sectionListCopy, f.sectionList)
+
+for i, s := range sectionListCopy {
+if s != name {
+continue
+}
+
+if occurrences == index {
+if len(f.sections[name]) <= 1 {
+delete(f.sections, name) // The last one in the map
+} else {
+f.sections[name] = append(f.sections[name][:index], f.sections[name][index+1:]...)
+}
+
+// Fix section lists
+f.sectionList = append(f.sectionList[:i], f.sectionList[i+1:]...)
+f.sectionIndexes = append(f.sectionIndexes[:i], f.sectionIndexes[i+1:]...)
+
+} else if occurrences > index {
+// Fix the indices of all following sections with this name.
+f.sectionIndexes[i-1]--
+}
+
+occurrences++
+}
+
+return nil
 }

 func (f *File) reload(s dataSource) error {

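The file.go hunks above vendor go-ini's non-unique section support (`AllowNonUniqueSections`, `SectionsByName`, `DeleteSectionWithIndex`). A minimal sketch of the new surface, assuming the upstream `gopkg.in/ini.v1` import path and made-up section data (illustrative only, not part of this commit):

```go
// Illustrative only; not part of this commit.
package main

import (
	"fmt"
	"log"

	"gopkg.in/ini.v1"
)

func main() {
	data := []byte("[peer]\nhost = a\n[peer]\nhost = b\n")

	// Without AllowNonUniqueSections the second [peer] would merge into the first.
	cfg, err := ini.LoadSources(ini.LoadOptions{AllowNonUniqueSections: true}, data)
	if err != nil {
		log.Fatal(err)
	}

	peers, err := cfg.SectionsByName("peer")
	if err != nil {
		log.Fatal(err)
	}
	for _, s := range peers {
		fmt.Println(s.Key("host").String()) // a, then b
	}
}
```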
@@ -203,7 +294,7 @@ func (f *File) Reload() (err error) {
 if err = f.reload(s); err != nil {
 // In loose mode, we create an empty default section for nonexistent files.
 if os.IsNotExist(err) && f.options.Loose {
-f.parse(bytes.NewBuffer(nil))
+_ = f.parse(bytes.NewBuffer(nil))
 continue
 }
 return err

@@ -230,16 +321,16 @@ func (f *File) Append(source interface{}, others ...interface{}) error {
 }

 func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) {
-equalSign := DefaultFormatLeft + "=" + DefaultFormatRight
+equalSign := DefaultFormatLeft + f.options.KeyValueDelimiterOnWrite + DefaultFormatRight

 if PrettyFormat || PrettyEqual {
-equalSign = " = "
+equalSign = fmt.Sprintf(" %s ", f.options.KeyValueDelimiterOnWrite)
 }

 // Use buffer to make sure target is safe until finish encoding.
 buf := bytes.NewBuffer(nil)
 for i, sname := range f.sectionList {
-sec := f.Section(sname)
+sec := f.SectionWithIndex(sname, f.sectionIndexes[i])
 if len(sec.Comment) > 0 {
 // Support multiline comments
 lines := strings.Split(sec.Comment, LineBreak)

@@ -282,7 +373,7 @@ func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) {
 }

 // Count and generate alignment length and buffer spaces using the
-// longest key. Keys may be modifed if they contain certain characters so
+// longest key. Keys may be modified if they contain certain characters so
 // we need to take that into account in our calculation.
 alignLength := 0
 if PrettyFormat {

@@ -18,8 +18,10 @@
 package ini

 import (
+"os"
 "regexp"
 "runtime"
+"strings"
 )

 const (

@@ -29,14 +31,8 @@ const (

 // Maximum allowed depth when recursively substituing variable names.
 depthValues = 99
-version = "1.51.0"
 )

-// Version returns current package version literal.
-func Version() string {
-return version
-}
-
 var (
 // LineBreak is the delimiter to determine or compose a new line.
 // This variable will be changed to "\r\n" automatically on Windows at package init time.

@@ -61,8 +57,10 @@ var (
 DefaultFormatRight = ""
 )

+var inTest = len(os.Args) > 0 && strings.HasSuffix(strings.TrimSuffix(os.Args[0], ".exe"), ".test")
+
 func init() {
-if runtime.GOOS == "windows" {
+if runtime.GOOS == "windows" && !inTest {
 LineBreak = "\r\n"
 }
 }

@@ -109,12 +107,16 @@ type LoadOptions struct {
 UnparseableSections []string
 // KeyValueDelimiters is the sequence of delimiters that are used to separate key and value. By default, it is "=:".
 KeyValueDelimiters string
+// KeyValueDelimiters is the delimiter that are used to separate key and value output. By default, it is "=".
+KeyValueDelimiterOnWrite string
 // PreserveSurroundedQuote indicates whether to preserve surrounded quote (single and double quotes).
 PreserveSurroundedQuote bool
 // DebugFunc is called to collect debug information (currently only useful to debug parsing Python-style multiline values).
 DebugFunc DebugFunc
 // ReaderBufferSize is the buffer size of the reader in bytes.
 ReaderBufferSize int
+// AllowNonUniqueSections indicates whether to allow sections with the same name multiple times.
+AllowNonUniqueSections bool
 }

 // DebugFunc is the type of function called to log parse events.

@@ -686,99 +686,127 @@ func (k *Key) StrictTimes(delim string) ([]time.Time, error) {
 // parseBools transforms strings to bools.
 func (k *Key) parseBools(strs []string, addInvalid, returnOnInvalid bool) ([]bool, error) {
 vals := make([]bool, 0, len(strs))
-for _, str := range strs {
+parser := func(str string) (interface{}, error) {
 val, err := parseBool(str)
-if err != nil && returnOnInvalid {
-return nil, err
-}
-if err == nil || addInvalid {
-vals = append(vals, val)
+return val, err
+}
+rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser)
+if err == nil {
+for _, val := range rawVals {
+vals = append(vals, val.(bool))
 }
 }
-return vals, nil
+return vals, err
 }

 // parseFloat64s transforms strings to float64s.
 func (k *Key) parseFloat64s(strs []string, addInvalid, returnOnInvalid bool) ([]float64, error) {
 vals := make([]float64, 0, len(strs))
-for _, str := range strs {
+parser := func(str string) (interface{}, error) {
 val, err := strconv.ParseFloat(str, 64)
-if err != nil && returnOnInvalid {
-return nil, err
-}
-if err == nil || addInvalid {
-vals = append(vals, val)
+return val, err
+}
+rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser)
+if err == nil {
+for _, val := range rawVals {
+vals = append(vals, val.(float64))
 }
 }
-return vals, nil
+return vals, err
 }

 // parseInts transforms strings to ints.
 func (k *Key) parseInts(strs []string, addInvalid, returnOnInvalid bool) ([]int, error) {
 vals := make([]int, 0, len(strs))
-for _, str := range strs {
-valInt64, err := strconv.ParseInt(str, 0, 64)
-val := int(valInt64)
-if err != nil && returnOnInvalid {
-return nil, err
-}
-if err == nil || addInvalid {
-vals = append(vals, val)
+parser := func(str string) (interface{}, error) {
+val, err := strconv.ParseInt(str, 0, 64)
+return val, err
+}
+rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser)
+if err == nil {
+for _, val := range rawVals {
+vals = append(vals, int(val.(int64)))
 }
 }
-return vals, nil
+return vals, err
 }

 // parseInt64s transforms strings to int64s.
 func (k *Key) parseInt64s(strs []string, addInvalid, returnOnInvalid bool) ([]int64, error) {
 vals := make([]int64, 0, len(strs))
-for _, str := range strs {
+parser := func(str string) (interface{}, error) {
 val, err := strconv.ParseInt(str, 0, 64)
-if err != nil && returnOnInvalid {
-return nil, err
-}
-if err == nil || addInvalid {
-vals = append(vals, val)
+return val, err
+}
+rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser)
+if err == nil {
+for _, val := range rawVals {
+vals = append(vals, val.(int64))
 }
 }
-return vals, nil
+return vals, err
 }

 // parseUints transforms strings to uints.
 func (k *Key) parseUints(strs []string, addInvalid, returnOnInvalid bool) ([]uint, error) {
 vals := make([]uint, 0, len(strs))
-for _, str := range strs {
-val, err := strconv.ParseUint(str, 0, 0)
-if err != nil && returnOnInvalid {
-return nil, err
-}
-if err == nil || addInvalid {
-vals = append(vals, uint(val))
+parser := func(str string) (interface{}, error) {
+val, err := strconv.ParseUint(str, 0, 64)
+return val, err
+}
+rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser)
+if err == nil {
+for _, val := range rawVals {
+vals = append(vals, uint(val.(uint64)))
 }
 }
-return vals, nil
+return vals, err
 }

 // parseUint64s transforms strings to uint64s.
 func (k *Key) parseUint64s(strs []string, addInvalid, returnOnInvalid bool) ([]uint64, error) {
 vals := make([]uint64, 0, len(strs))
-for _, str := range strs {
+parser := func(str string) (interface{}, error) {
 val, err := strconv.ParseUint(str, 0, 64)
-if err != nil && returnOnInvalid {
-return nil, err
-}
-if err == nil || addInvalid {
-vals = append(vals, val)
+return val, err
+}
+rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser)
+if err == nil {
+for _, val := range rawVals {
+vals = append(vals, val.(uint64))
 }
 }
-return vals, nil
+return vals, err
 }

+type Parser func(str string) (interface{}, error)
+
 // parseTimesFormat transforms strings to times in given format.
 func (k *Key) parseTimesFormat(format string, strs []string, addInvalid, returnOnInvalid bool) ([]time.Time, error) {
 vals := make([]time.Time, 0, len(strs))
-for _, str := range strs {
+parser := func(str string) (interface{}, error) {
 val, err := time.Parse(format, str)
+return val, err
+}
+rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser)
+if err == nil {
+for _, val := range rawVals {
+vals = append(vals, val.(time.Time))
+}
+}
+return vals, err
+}
+
+// doParse transforms strings to different types
+func (k *Key) doParse(strs []string, addInvalid, returnOnInvalid bool, parser Parser) ([]interface{}, error) {
+vals := make([]interface{}, 0, len(strs))
+for _, str := range strs {
+val, err := parser(str)
 if err != nil && returnOnInvalid {
 return nil, err
 }

@ -84,7 +84,10 @@ func (p *parser) BOM() error {
|
||||||
case mask[0] == 254 && mask[1] == 255:
|
case mask[0] == 254 && mask[1] == 255:
|
||||||
fallthrough
|
fallthrough
|
||||||
case mask[0] == 255 && mask[1] == 254:
|
case mask[0] == 255 && mask[1] == 254:
|
||||||
p.buf.Read(mask)
|
_, err = p.buf.Read(mask)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
case mask[0] == 239 && mask[1] == 187:
|
case mask[0] == 239 && mask[1] == 187:
|
||||||
mask, err := p.buf.Peek(3)
|
mask, err := p.buf.Peek(3)
|
||||||
if err != nil && err != io.EOF {
|
if err != nil && err != io.EOF {
|
||||||
|
|
@ -93,7 +96,10 @@ func (p *parser) BOM() error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
if mask[2] == 191 {
|
if mask[2] == 191 {
|
||||||
p.buf.Read(mask)
|
_, err = p.buf.Read(mask)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -135,7 +141,7 @@ func readKeyName(delimiters string, in []byte) (string, int, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get out key name
|
// Get out key name
|
||||||
endIdx := -1
|
var endIdx int
|
||||||
if len(keyQuote) > 0 {
|
if len(keyQuote) > 0 {
|
||||||
startIdx := len(keyQuote)
|
startIdx := len(keyQuote)
|
||||||
// FIXME: fail case -> """"""name"""=value
|
// FIXME: fail case -> """"""name"""=value
|
||||||
|
|
@ -181,7 +187,7 @@ func (p *parser) readMultilines(line, val, valQuote string) (string, error) {
|
||||||
}
|
}
|
||||||
val += next
|
val += next
|
||||||
if p.isEOF {
|
if p.isEOF {
|
||||||
return "", fmt.Errorf("missing closing key quote from '%s' to '%s'", line, next)
|
return "", fmt.Errorf("missing closing key quote from %q to %q", line, next)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return val, nil
|
return val, nil
|
||||||
|
|
@@ -413,7 +419,10 @@ func (f *File) parse(reader io.Reader) (err error) {
 		if f.options.AllowNestedValues &&
 			isLastValueEmpty && len(line) > 0 {
 			if line[0] == ' ' || line[0] == '\t' {
-				lastRegularKey.addNestedValue(string(bytes.TrimSpace(line)))
+				err = lastRegularKey.addNestedValue(string(bytes.TrimSpace(line)))
+				if err != nil {
+					return err
+				}
 				continue
 			}
 		}
@@ -453,14 +462,14 @@ func (f *File) parse(reader io.Reader) (err error) {
 
 			section.Comment = strings.TrimSpace(p.comment.String())
 
-			// Reset aotu-counter and comments
+			// Reset auto-counter and comments
 			p.comment.Reset()
 			p.count = 1
 
 			inUnparseableSection = false
 			for i := range f.options.UnparseableSections {
 				if f.options.UnparseableSections[i] == name ||
-					(f.options.Insensitive && strings.ToLower(f.options.UnparseableSections[i]) == strings.ToLower(name)) {
+					(f.options.Insensitive && strings.EqualFold(f.options.UnparseableSections[i], name)) {
 					inUnparseableSection = true
 					continue
 				}
@@ -131,7 +131,7 @@ func (s *Section) GetKey(name string) (*Key, error) {
 			}
 			break
 		}
-		return nil, fmt.Errorf("error when getting key of section '%s': key '%s' not exists", s.name, name)
+		return nil, fmt.Errorf("error when getting key of section %q: key %q not exists", s.name, name)
 	}
 	return key, nil
 }
@@ -249,7 +249,7 @@ func (s *Section) ChildSections() []*Section {
 	children := make([]*Section, 0, 3)
 	for _, name := range s.f.sectionList {
 		if strings.HasPrefix(name, prefix) {
-			children = append(children, s.f.sections[name])
+			children = append(children, s.f.sections[name]...)
 		}
 	}
 	return children
@@ -183,6 +183,10 @@ func setWithProperType(t reflect.Type, key *Key, field reflect.Value, delim stri
 		if vt.Name() == "Duration" {
 			durationVal, err := key.Duration()
 			if err != nil {
+				if intVal, err := key.Int64(); err == nil {
+					field.SetInt(intVal)
+					return nil
+				}
 				return wrapStrictError(err, isStrict)
 			}
 			if isPtr {
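The fallback added here means a duration field no longer strictly requires a unit suffix: if time.ParseDuration fails, the raw integer is written with SetInt, so it lands as that many nanoseconds. A minimal sketch of the effect; the type and key names below are illustrative, not part of this commit:

package config

import "time"

// "Timeout = 5s" still goes through time.ParseDuration; with the fallback
// above a bare "Timeout = 5" also maps, but as time.Duration(5), i.e. 5ns,
// because the raw int64 is written with SetInt.
type ClientConfig struct {
	Timeout time.Duration `ini:"Timeout"`
}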
@@ -254,13 +258,13 @@ func setWithProperType(t reflect.Type, key *Key, field reflect.Value, delim stri
 	case reflect.Slice:
 		return setSliceWithProperType(key, field, delim, allowShadow, isStrict)
 	default:
-		return fmt.Errorf("unsupported type '%s'", t)
+		return fmt.Errorf("unsupported type %q", t)
 	}
 	return nil
 }
 
-func parseTagOptions(tag string) (rawName string, omitEmpty bool, allowShadow bool) {
-	opts := strings.SplitN(tag, ",", 3)
+func parseTagOptions(tag string) (rawName string, omitEmpty bool, allowShadow bool, allowNonUnique bool) {
+	opts := strings.SplitN(tag, ",", 4)
 	rawName = opts[0]
 	if len(opts) > 1 {
 		omitEmpty = opts[1] == "omitempty"
@@ -268,10 +272,15 @@ func parseTagOptions(tag string) (rawName string, omitEmpty bool, allowShadow bo
 	if len(opts) > 2 {
 		allowShadow = opts[2] == "allowshadow"
 	}
-	return rawName, omitEmpty, allowShadow
+	if len(opts) > 3 {
+		allowNonUnique = opts[3] == "nonunique"
+	}
+	return rawName, omitEmpty, allowShadow, allowNonUnique
 }
 
-func (s *Section) mapTo(val reflect.Value, isStrict bool) error {
+// mapToField maps the given value to the matching field of the given section.
+// The sectionIndex is the index (if non unique sections are enabled) to which the value should be added.
+func (s *Section) mapToField(val reflect.Value, isStrict bool, sectionIndex int) error {
 	if val.Kind() == reflect.Ptr {
 		val = val.Elem()
 	}
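The new fourth return value corresponds to a fourth comma-separated position in the `ini` struct tag. A rough sketch of the tag layout this parser accepts; the type and key names are illustrative, not taken from the commit:

package config

// Positions in the tag are fixed: ini:"<key name>,<omitempty>,<allowshadow>,<nonunique>".
type Peer struct {
	Addr string `ini:"addr"`
}

type ServerConfig struct {
	Name    string   `ini:"name,omitempty"`
	Sources []string `ini:"source,,allowshadow"` // repeated keys via shadows (needs AllowShadows)
	Peers   []Peer   `ini:"peer,,,nonunique"`    // repeated [peer] sections (needs AllowNonUniqueSections)
}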
@@ -286,7 +295,7 @@ func (s *Section) mapTo(val reflect.Value, isStrict bool) error {
 			continue
 		}
 
-		rawName, _, allowShadow := parseTagOptions(tag)
+		rawName, _, allowShadow, allowNonUnique := parseTagOptions(tag)
 		fieldName := s.parseFieldName(tpField.Name, rawName)
 		if len(fieldName) == 0 || !field.CanSet() {
 			continue
@@ -300,56 +309,96 @@ func (s *Section) mapTo(val reflect.Value, isStrict bool) error {
 		}
 
 		if isAnonymous || isStruct || isStructPtr {
-			if sec, err := s.f.GetSection(fieldName); err == nil {
-				// Only set the field to non-nil struct value if we have
-				// a section for it. Otherwise, we end up with a non-nil
-				// struct ptr even though there is no data.
+			if secs, err := s.f.SectionsByName(fieldName); err == nil {
+				if len(secs) <= sectionIndex {
+					return fmt.Errorf("there are not enough sections (%d <= %d) for the field %q", len(secs), sectionIndex, fieldName)
+				}
+				// Only set the field to non-nil struct value if we have a section for it.
+				// Otherwise, we end up with a non-nil struct ptr even though there is no data.
 				if isStructPtr && field.IsNil() {
 					field.Set(reflect.New(tpField.Type.Elem()))
 				}
-				if err = sec.mapTo(field, isStrict); err != nil {
-					return fmt.Errorf("error mapping field(%s): %v", fieldName, err)
+				if err = secs[sectionIndex].mapToField(field, isStrict, sectionIndex); err != nil {
+					return fmt.Errorf("map to field %q: %v", fieldName, err)
 				}
 				continue
 			}
 		}
 
+		// Map non-unique sections
+		if allowNonUnique && tpField.Type.Kind() == reflect.Slice {
+			newField, err := s.mapToSlice(fieldName, field, isStrict)
+			if err != nil {
+				return fmt.Errorf("map to slice %q: %v", fieldName, err)
+			}
+
+			field.Set(newField)
+			continue
+		}
+
 		if key, err := s.GetKey(fieldName); err == nil {
 			delim := parseDelim(tpField.Tag.Get("delim"))
 			if err = setWithProperType(tpField.Type, key, field, delim, allowShadow, isStrict); err != nil {
-				return fmt.Errorf("error mapping field(%s): %v", fieldName, err)
+				return fmt.Errorf("set field %q: %v", fieldName, err)
 			}
 		}
 	}
 	return nil
 }
 
-// MapTo maps section to given struct.
-func (s *Section) MapTo(v interface{}) error {
+// mapToSlice maps all sections with the same name and returns the new value.
+// The type of the Value must be a slice.
+func (s *Section) mapToSlice(secName string, val reflect.Value, isStrict bool) (reflect.Value, error) {
+	secs, err := s.f.SectionsByName(secName)
+	if err != nil {
+		return reflect.Value{}, err
+	}
+
+	typ := val.Type().Elem()
+	for i, sec := range secs {
+		elem := reflect.New(typ)
+		if err = sec.mapToField(elem, isStrict, i); err != nil {
+			return reflect.Value{}, fmt.Errorf("map to field from section %q: %v", secName, err)
+		}
+
+		val = reflect.Append(val, elem.Elem())
+	}
+	return val, nil
+}
+
+// mapTo maps a section to object v.
+func (s *Section) mapTo(v interface{}, isStrict bool) error {
 	typ := reflect.TypeOf(v)
 	val := reflect.ValueOf(v)
 	if typ.Kind() == reflect.Ptr {
 		typ = typ.Elem()
 		val = val.Elem()
 	} else {
-		return errors.New("cannot map to non-pointer struct")
+		return errors.New("not a pointer to a struct")
 	}
 
-	return s.mapTo(val, false)
+	if typ.Kind() == reflect.Slice {
+		newField, err := s.mapToSlice(s.name, val, isStrict)
+		if err != nil {
+			return err
+		}
+
+		val.Set(newField)
+		return nil
+	}
+
+	return s.mapToField(val, isStrict, 0)
+}
+
+// MapTo maps section to given struct.
+func (s *Section) MapTo(v interface{}) error {
+	return s.mapTo(v, false)
 }
 
 // StrictMapTo maps section to given struct in strict mode,
 // which returns all possible error including value parsing error.
 func (s *Section) StrictMapTo(v interface{}) error {
-	typ := reflect.TypeOf(v)
-	val := reflect.ValueOf(v)
-	if typ.Kind() == reflect.Ptr {
-		typ = typ.Elem()
-		val = val.Elem()
-	} else {
-		return errors.New("cannot map to non-pointer struct")
-	}
-
-	return s.mapTo(val, true)
+	return s.mapTo(v, true)
 }
 
 // MapTo maps file to given struct.
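Taken together, mapToField, mapToSlice, and SectionsByName let repeated sections with the same name be mapped onto a slice field. A usage sketch under the assumption that the file is loaded with AllowNonUniqueSections; the INI content, type, and key names are made up for illustration:

package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

type Peer struct {
	Addr string `ini:"addr"`
}

type Config struct {
	Peers []Peer `ini:"peer,,,nonunique"`
}

func main() {
	data := []byte("[peer]\naddr = 10.0.0.1\n[peer]\naddr = 10.0.0.2\n")

	// AllowNonUniqueSections lets the two [peer] sections coexist instead of merging.
	f, err := ini.LoadSources(ini.LoadOptions{AllowNonUniqueSections: true}, data)
	if err != nil {
		panic(err)
	}

	var cfg Config
	if err := f.MapTo(&cfg); err != nil {
		panic(err)
	}
	fmt.Println(len(cfg.Peers)) // expected: 2
}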
@@ -427,7 +476,7 @@ func reflectSliceWithProperType(key *Key, field reflect.Value, delim string, all
 		if i == 0 {
 			keyWithShadows = newKey(key.s, key.name, val)
 		} else {
-			keyWithShadows.AddShadow(val)
+			_ = keyWithShadows.AddShadow(val)
 		}
 	}
 	key = keyWithShadows
@@ -480,7 +529,7 @@ func reflectWithProperType(t reflect.Type, key *Key, field reflect.Value, delim
 			return reflectWithProperType(t.Elem(), key, field.Elem(), delim, allowShadow)
 		}
 	default:
-		return fmt.Errorf("unsupported type '%s'", t)
+		return fmt.Errorf("unsupported type %q", t)
 	}
 	return nil
 }
@@ -508,6 +557,11 @@ func isEmptyValue(v reflect.Value) bool {
 	return false
 }
 
+// StructReflector is the interface implemented by struct types that can extract themselves into INI objects.
+type StructReflector interface {
+	ReflectINIStruct(*File) error
+}
+
 func (s *Section) reflectFrom(val reflect.Value) error {
 	if val.Kind() == reflect.Ptr {
 		val = val.Elem()
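The StructReflector interface gives a type full control over how it is serialized: when a field implements it, reflectFrom delegates to ReflectINIStruct instead of walking the field's own fields. A hedged sketch of one possible implementation; the type, section, and key names are illustrative, not from this commit:

package main

import (
	"os"

	"gopkg.in/ini.v1"
)

// FeatureFlags writes itself out instead of relying on per-field reflection.
type FeatureFlags struct {
	Flags map[string]string
}

// ReflectINIStruct satisfies the StructReflector interface added above.
func (ff FeatureFlags) ReflectINIStruct(f *ini.File) error {
	sec := f.Section("features")
	for name, value := range ff.Flags {
		if _, err := sec.NewKey(name, value); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	f := ini.Empty()
	cfg := struct {
		Features FeatureFlags `ini:"features"`
	}{Features: FeatureFlags{Flags: map[string]string{"gzip": "true"}}}

	// reflectFrom detects the StructReflector implementation and delegates to it.
	if err := ini.ReflectFrom(f, &cfg); err != nil {
		panic(err)
	}
	_, _ = f.WriteTo(os.Stdout)
}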
@@ -515,6 +569,10 @@ func (s *Section) reflectFrom(val reflect.Value) error {
 	typ := val.Type()
 
 	for i := 0; i < typ.NumField(); i++ {
+		if !val.Field(i).CanInterface() {
+			continue
+		}
+
 		field := val.Field(i)
 		tpField := typ.Field(i)
 
@@ -523,11 +581,15 @@ func (s *Section) reflectFrom(val reflect.Value) error {
 			continue
 		}
 
-		rawName, omitEmpty, allowShadow := parseTagOptions(tag)
+		rawName, omitEmpty, allowShadow, allowNonUnique := parseTagOptions(tag)
 		if omitEmpty && isEmptyValue(field) {
 			continue
 		}
 
+		if r, ok := field.Interface().(StructReflector); ok {
+			return r.ReflectINIStruct(s.f)
+		}
+
 		fieldName := s.parseFieldName(tpField.Name, rawName)
 		if len(fieldName) == 0 || !field.CanSet() {
 			continue
@@ -548,12 +610,41 @@ func (s *Section) reflectFrom(val reflect.Value) error {
 			}
 
 			if err = sec.reflectFrom(field); err != nil {
-				return fmt.Errorf("error reflecting field (%s): %v", fieldName, err)
+				return fmt.Errorf("reflect from field %q: %v", fieldName, err)
 			}
 			continue
 		}
 
-		// Note: Same reason as secion.
+		if allowNonUnique && tpField.Type.Kind() == reflect.Slice {
+			slice := field.Slice(0, field.Len())
+			if field.Len() == 0 {
+				return nil
+			}
+			sliceOf := field.Type().Elem().Kind()
+
+			for i := 0; i < field.Len(); i++ {
+				if sliceOf != reflect.Struct && sliceOf != reflect.Ptr {
+					return fmt.Errorf("field %q is not a slice of pointer or struct", fieldName)
+				}
+
+				sec, err := s.f.NewSection(fieldName)
+				if err != nil {
+					return err
+				}
+
+				// Add comment from comment tag
+				if len(sec.Comment) == 0 {
+					sec.Comment = tpField.Tag.Get("comment")
+				}
+
+				if err := sec.reflectFrom(slice.Index(i)); err != nil {
+					return fmt.Errorf("reflect from field %q: %v", fieldName, err)
+				}
+			}
+			continue
+		}
+
+		// Note: Same reason as section.
 		key, err := s.GetKey(fieldName)
 		if err != nil {
 			key, _ = s.NewKey(fieldName, "")
@@ -564,23 +655,58 @@ func (s *Section) reflectFrom(val reflect.Value) error {
 			key.Comment = tpField.Tag.Get("comment")
 		}
 
-		if err = reflectWithProperType(tpField.Type, key, field, parseDelim(tpField.Tag.Get("delim")), allowShadow); err != nil {
-			return fmt.Errorf("error reflecting field (%s): %v", fieldName, err)
+		delim := parseDelim(tpField.Tag.Get("delim"))
+		if err = reflectWithProperType(tpField.Type, key, field, delim, allowShadow); err != nil {
+			return fmt.Errorf("reflect field %q: %v", fieldName, err)
 		}
 
 	}
 	return nil
 }
 
-// ReflectFrom reflects secion from given struct.
+// ReflectFrom reflects section from given struct. It overwrites existing ones.
 func (s *Section) ReflectFrom(v interface{}) error {
 	typ := reflect.TypeOf(v)
 	val := reflect.ValueOf(v)
 
+	if s.name != DefaultSection && s.f.options.AllowNonUniqueSections &&
+		(typ.Kind() == reflect.Slice || typ.Kind() == reflect.Ptr) {
+		// Clear sections to make sure none exists before adding the new ones
+		s.f.DeleteSection(s.name)
+
+		if typ.Kind() == reflect.Ptr {
+			sec, err := s.f.NewSection(s.name)
+			if err != nil {
+				return err
+			}
+			return sec.reflectFrom(val.Elem())
+		}
+
+		slice := val.Slice(0, val.Len())
+		sliceOf := val.Type().Elem().Kind()
+		if sliceOf != reflect.Ptr {
+			return fmt.Errorf("not a slice of pointers")
+		}
+
+		for i := 0; i < slice.Len(); i++ {
+			sec, err := s.f.NewSection(s.name)
+			if err != nil {
+				return err
+			}
+
+			err = sec.reflectFrom(slice.Index(i))
+			if err != nil {
+				return fmt.Errorf("reflect from %dth field: %v", i, err)
+			}
+		}
+
+		return nil
+	}
+
 	if typ.Kind() == reflect.Ptr {
-		typ = typ.Elem()
 		val = val.Elem()
 	} else {
-		return errors.New("cannot reflect from non-pointer struct")
+		return errors.New("not a pointer to a struct")
 	}
 
 	return s.reflectFrom(val)
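The slice branch added to ReflectFrom is the write-side counterpart of mapToSlice: each element of a slice of pointers becomes its own section with the same name. A rough sketch, assuming the file permits non-unique sections; the type, section, and values are illustrative, not from this commit:

package main

import (
	"os"

	"gopkg.in/ini.v1"
)

type Peer struct {
	Addr string `ini:"addr"`
}

func main() {
	// An empty file that permits several sections with the same name.
	f, err := ini.LoadSources(ini.LoadOptions{AllowNonUniqueSections: true}, []byte(""))
	if err != nil {
		panic(err)
	}

	peers := []*Peer{{Addr: "10.0.0.1"}, {Addr: "10.0.0.2"}}

	// Passing the slice itself takes the new branch and emits one [peer] section per element.
	if err := f.Section("peer").ReflectFrom(peers); err != nil {
		panic(err)
	}
	_, _ = f.WriteTo(os.Stdout)
}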
@@ -61,6 +61,12 @@ github.com/aokoli/goutils
 github.com/apparentlymart/go-textseg/textseg
 # github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da
 github.com/armon/go-metrics
+# github.com/aws/amazon-ec2-instance-selector/v2 v2.0.1
+## explicit
+github.com/aws/amazon-ec2-instance-selector/v2/pkg/bytequantity
+github.com/aws/amazon-ec2-instance-selector/v2/pkg/cli
+github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector
+github.com/aws/amazon-ec2-instance-selector/v2/pkg/selector/outputs
 # github.com/aws/aws-sdk-go v1.33.13
 ## explicit
 github.com/aws/aws-sdk-go/aws
@@ -168,8 +174,8 @@ github.com/coreos/etcd/pkg/types
 github.com/coreos/etcd/version
 # github.com/coreos/go-semver v0.3.0
 github.com/coreos/go-semver/semver
-# github.com/cpuguy83/go-md2man v1.0.10
-github.com/cpuguy83/go-md2man/md2man
+# github.com/cpuguy83/go-md2man/v2 v2.0.0
+github.com/cpuguy83/go-md2man/v2/md2man
 # github.com/davecgh/go-spew v1.1.1
 github.com/davecgh/go-spew/spew
 # github.com/denverdino/aliyungo v0.0.0-20191128015008-acd8035bbb1d
@@ -479,6 +485,8 @@ github.com/prometheus/procfs
 github.com/prometheus/procfs/internal/fs
 # github.com/russross/blackfriday v1.5.2
 github.com/russross/blackfriday
+# github.com/russross/blackfriday/v2 v2.0.1
+github.com/russross/blackfriday/v2
 # github.com/ryanuber/go-glob v1.0.0
 github.com/ryanuber/go-glob
 # github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529
@@ -486,6 +494,8 @@ github.com/sean-/seed
 # github.com/sergi/go-diff v1.0.0
 ## explicit
 github.com/sergi/go-diff/diffmatchpatch
+# github.com/shurcooL/sanitized_anchor_name v1.0.0
+github.com/shurcooL/sanitized_anchor_name
 # github.com/sirupsen/logrus v1.4.2
 github.com/sirupsen/logrus
 # github.com/spf13/afero v1.2.2
@@ -493,7 +503,7 @@ github.com/spf13/afero
 github.com/spf13/afero/mem
 # github.com/spf13/cast v1.3.0
 github.com/spf13/cast
-# github.com/spf13/cobra v0.0.5
+# github.com/spf13/cobra v0.0.7
 ## explicit
 github.com/spf13/cobra
 github.com/spf13/cobra/doc
@@ -786,7 +796,7 @@ gopkg.in/gcfg.v1/types
 # gopkg.in/inf.v0 v0.9.1
 ## explicit
 gopkg.in/inf.v0
-# gopkg.in/ini.v1 v1.51.0
+# gopkg.in/ini.v1 v1.57.0
 gopkg.in/ini.v1
 # gopkg.in/square/go-jose.v2 v2.3.1
 gopkg.in/square/go-jose.v2