Improve the logic by which subdirectory links are filtered.

This commit is contained in:
Diego Fernando Carrión
2019-07-16 11:49:48 +02:00
parent d32ce8dd5d
commit 38700c8583
2 changed files with 41 additions and 11 deletions

17
Gopkg.lock generated
View File

@ -2,26 +2,39 @@
[[projects]]
digest = "1:f1ff199c1c798c5984c1d8404f46cf7356ab9cc2f2ce00968a13faa544feb7e1"
name = "github.com/anaskhan96/soup"
packages = ["."]
pruneopts = ""
revision = "50123c340ba50505026229a3fb7e0bc5343e7e4d"
version = "v1.1.1"
[[projects]]
digest = "1:d5f5edf20f2539c95c3cc0208def77b52e7e87771dc027be073163621a1b1048"
name = "github.com/karrick/godirwalk"
packages = ["."]
pruneopts = ""
revision = "73c17a9b9528eb3ce857b782a2816c0cda581e62"
version = "v1.10.12"
[[projects]]
branch = "master"
digest = "1:31cd6e3c114e17c5f0c9e8b0bcaa3025ab3c221ce36323c7ce1acaa753d0d0aa"
name = "golang.org/x/net"
packages = ["html","html/atom"]
packages = [
"html",
"html/atom",
]
pruneopts = ""
revision = "da137c7871d730100384dbcf36e6f8fa493aef5b"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "440f537b71a10485068d4828ef2242afbb4f914c3b7efee0f233c097b8499a47"
input-imports = [
"github.com/anaskhan96/soup",
"github.com/karrick/godirwalk",
"golang.org/x/net/html",
]
solver-name = "gps-cdcl"
solver-version = 1

31
main.go
View File

@ -55,6 +55,7 @@ const (
var (
cleanupTasks = list.New()
filesWritten = HashTable{}
)
func registerCleanUpTask(task func()) {
@ -81,6 +82,7 @@ var args struct {
pkgRoot string
godocCmd string
verbose bool
symlinks bool
exclude string
excludedDirs HashTable
}
@ -125,6 +127,12 @@ func initAndValidateArgs() {
false,
"Whether to print verbose output.",
)
flag.BoolVar(
&args.symlinks,
"symlinks",
false,
"Whether to follow symlinks when generating documentation.",
)
flag.StringVar(
&args.exclude,
"exclude",
@ -239,13 +247,6 @@ func modifyHTML(doc string, pkgPath string, subPkgs HashTable) ([]byte, error) {
footer := tagSoup.FindStrict("div", "id", "footer")
extractNode(&footer)
// Delete all pkg-name links which are not valid subpackages.
for _, td := range tagSoup.FindAllStrict("td", "id", "pkgName") {
if subPkgs.Has(td.Find("a").Text()) {
extractNode(&td)
}
}
// Add index.html to all the pkg-dir links.
pkgDir := tagSoup.FindStrict("div", "class", "pkg-dir")
if pkgDir.Pointer != nil {
@ -257,6 +258,14 @@ func modifyHTML(doc string, pkgPath string, subPkgs HashTable) ([]byte, error) {
}
}
// Delete all pkg-name links which are not valid subpackages.
for _, td := range tagSoup.FindAllStrict("td", "class", "pkg-name") {
linkTarget := fmt.Sprint(args.outputPath, "/", td.Find("a").Attrs()["href"])
if !filesWritten.Has(linkTarget) {
extractNode(&td)
}
}
// Fix the main body links.
page := tagSoup.Find("div", "id", "page")
if page.Pointer != nil {
@ -312,8 +321,13 @@ func writePkgDoc(pkgPath string, subPkgs HashTable) error {
}
outputFile := fmt.Sprint(
args.outputPath+"/",
strings.TrimPrefix(
fmt.Sprint(
strings.TrimPrefix(pkgPath, args.pkgRoot),
"/index.html",
),
"/",
),
)
verboseLogf("Writing %s documentation to %s", pkgName, outputFile)
os.MkdirAll(filepath.Dir(outputFile), os.ModePerm)
@ -324,6 +338,8 @@ func writePkgDoc(pkgPath string, subPkgs HashTable) error {
); err != nil {
return err
}
// Record that we have written this file (for later filtering of package links).
filesWritten.Add(outputFile)
return nil
}
@ -333,6 +349,7 @@ func genDocs() error {
return walk.Walk(
args.pkgRoot,
&walk.Options{
FollowSymbolicLinks: args.symlinks,
// Define callback to run for every entity found.
Callback: func(path string, de *walk.Dirent) error {
verboseLogf("Examining %s", path)