2023-11-15 15:15:01 +03:00
|
|
|
// Copyright 2023 The Go Authors. All rights reserved.
|
|
|
|
// Use of this source code is governed by a BSD-style
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
|
|
|
// Package relnote supports working with release notes.
|
|
|
|
//
|
|
|
|
// Its main feature is the ability to merge Markdown fragments into a single
|
2024-01-18 03:33:15 +03:00
|
|
|
// document. (See [Merge].)
|
2023-11-15 15:15:01 +03:00
|
|
|
//
|
|
|
|
// This package has minimal imports, so that it can be vendored into the
|
|
|
|
// main go repo.
|
|
|
|
package relnote
|
|
|
|
|
|
|
|
import (
|
2024-01-18 03:33:15 +03:00
|
|
|
"bufio"
|
2023-11-15 15:15:01 +03:00
|
|
|
"bytes"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
2024-01-10 22:40:29 +03:00
|
|
|
"io/fs"
|
2024-01-14 16:13:45 +03:00
|
|
|
"path"
|
2024-01-18 03:33:15 +03:00
|
|
|
"regexp"
|
2024-01-10 22:40:29 +03:00
|
|
|
"slices"
|
2024-01-18 03:33:15 +03:00
|
|
|
"strconv"
|
2023-11-15 15:15:01 +03:00
|
|
|
"strings"
|
|
|
|
|
|
|
|
md "rsc.io/markdown"
|
|
|
|
)
|
|
|
|
|
|
|
|
// NewParser returns a properly configured Markdown parser.
|
|
|
|
func NewParser() *md.Parser {
|
|
|
|
var p md.Parser
|
|
|
|
p.HeadingIDs = true
|
|
|
|
return &p
|
|
|
|
}
|
|
|
|
|
|
|
|
// CheckFragment reports problems in a release-note fragment.
|
|
|
|
func CheckFragment(data string) error {
|
|
|
|
doc := NewParser().Parse(data)
|
2024-01-14 22:46:21 +03:00
|
|
|
// Check that the content of the document contains either a TODO or at least one sentence.
|
2024-01-31 17:38:51 +03:00
|
|
|
txt := ""
|
|
|
|
if len(doc.Blocks) > 0 {
|
|
|
|
txt = text(doc)
|
|
|
|
}
|
2024-01-14 22:46:21 +03:00
|
|
|
if !strings.Contains(txt, "TODO") && !strings.ContainsAny(txt, ".?!") {
|
2024-01-31 17:38:51 +03:00
|
|
|
return errors.New("File must contain a complete sentence or a TODO.")
|
2023-11-15 15:15:01 +03:00
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// text returns all the text in a block, without any formatting.
// Container blocks are flattened recursively; leaf blocks contribute
// their raw text. It panics on an unrecognized block type.
func text(b md.Block) string {
	switch b := b.(type) {
	case *md.Document:
		return blocksText(b.Blocks)
	case *md.Heading:
		return text(b.Text)
	case *md.Text:
		return inlineText(b.Inline)
	case *md.CodeBlock:
		// Code block lines are stored unrendered; rejoin them.
		return strings.Join(b.Text, "\n")
	case *md.HTMLBlock:
		return strings.Join(b.Text, "\n")
	case *md.List:
		return blocksText(b.Items)
	case *md.Item:
		return blocksText(b.Blocks)
	case *md.Empty:
		return ""
	case *md.Paragraph:
		return text(b.Text)
	case *md.Quote:
		return blocksText(b.Blocks)
	case *md.ThematicBreak:
		return "---"
	default:
		// A new block type added to rsc.io/markdown would need a case here.
		panic(fmt.Sprintf("unknown block type %T", b))
	}
}
|
|
|
|
|
|
|
|
// blocksText returns all the text in a slice of block nodes.
|
|
|
|
func blocksText(bs []md.Block) string {
|
|
|
|
var d strings.Builder
|
|
|
|
for _, b := range bs {
|
|
|
|
io.WriteString(&d, text(b))
|
|
|
|
fmt.Fprintln(&d)
|
|
|
|
}
|
|
|
|
return d.String()
|
|
|
|
}
|
|
|
|
|
|
|
|
// inlineText returns all the next in a slice of inline nodes.
|
|
|
|
func inlineText(ins []md.Inline) string {
|
|
|
|
var buf bytes.Buffer
|
|
|
|
for _, in := range ins {
|
|
|
|
in.PrintText(&buf)
|
|
|
|
}
|
|
|
|
return buf.String()
|
|
|
|
}
|
2024-01-10 22:40:29 +03:00
|
|
|
|
|
|
|
// Merge combines the markdown documents (files ending in ".md") in the tree rooted
// at fs into a single document.
// The blocks of the documents are concatenated in lexicographic order by filename.
// Headings with no content are removed.
// The link keys must be unique, and are combined into a single map.
//
// Files in the "minor changes" directory (the unique directory matching the glob
// "*stdlib/*minor") are named after the package to which they refer, and will have
// the package heading inserted automatically and links to other standard library
// symbols expanded automatically. For example, if a file *stdlib/minor/bytes/f.md
// contains the text
//
//	[Reader] implements [io.Reader].
//
// then that will become
//
//	[Reader](/pkg/bytes#Reader) implements [io.Reader](/pkg/io#Reader).
func Merge(fsys fs.FS) (*md.Document, error) {
	filenames, err := sortedMarkdownFilenames(fsys)
	if err != nil {
		return nil, err
	}
	doc := &md.Document{Links: map[string]*md.Link{}}
	var prevPkg string // previous stdlib package, if any
	for _, filename := range filenames {
		newdoc, err := parseMarkdownFile(fsys, filename)
		if err != nil {
			return nil, err
		}
		if len(newdoc.Blocks) == 0 {
			// Nothing to merge from an empty fragment.
			continue
		}
		pkg := stdlibPackage(filename)
		// Autolink Go symbols.
		addSymbolLinks(newdoc, pkg)
		if len(doc.Blocks) > 0 {
			// If this is the first file of a new stdlib package under the "Minor changes
			// to the library" section, insert a heading for the package.
			if pkg != "" && pkg != prevPkg {
				h := stdlibPackageHeading(pkg, lastBlock(doc).Pos().EndLine)
				doc.Blocks = append(doc.Blocks, h)
			}
			prevPkg = pkg
			// Put a blank line between the current and new blocks, so that the end
			// of a file acts as a blank line.
			// Shift every incoming block so line numbers stay consistent
			// in the merged document.
			lastLine := lastBlock(doc).Pos().EndLine
			delta := lastLine + 2 - newdoc.Blocks[0].Pos().StartLine
			for _, b := range newdoc.Blocks {
				addLines(b, delta)
			}
		}
		// Append non-empty blocks to the result document.
		for _, b := range newdoc.Blocks {
			if _, ok := b.(*md.Empty); !ok {
				doc.Blocks = append(doc.Blocks, b)
			}
		}
		// Merge link references; duplicates across fragments are an error.
		for key, link := range newdoc.Links {
			if doc.Links[key] != nil {
				return nil, fmt.Errorf("duplicate link reference %q; second in %s", key, filename)
			}
			doc.Links[key] = link
		}
	}
	// Remove headings with empty contents.
	doc.Blocks = removeEmptySections(doc.Blocks)
	if len(doc.Blocks) > 0 && len(doc.Links) > 0 {
		// Add a blank line to separate the links.
		lastPos := lastBlock(doc).Pos()
		lastPos.StartLine += 2
		lastPos.EndLine += 2
		doc.Blocks = append(doc.Blocks, &md.Empty{Position: lastPos})
	}
	return doc, nil
}
|
|
|
|
|
2024-01-14 16:13:45 +03:00
|
|
|
// stdlibPackage returns the standard library package for the given filename.
// If the filename does not represent a package, it returns the empty string.
// A filename represents package P if it is in a directory matching the glob
// "*stdlib/*minor/P".
func stdlibPackage(filename string) string {
	// Split off the first two path components and check their suffixes.
	first, rest, _ := strings.Cut(filename, "/")
	second, tail, _ := strings.Cut(rest, "/")
	switch {
	case !strings.HasSuffix(first, "stdlib"):
		return ""
	case !strings.HasSuffix(second, "minor"):
		return ""
	}
	// The package is everything up to the file's base name.
	if p := path.Dir(tail); p != "." {
		return p
	}
	return ""
}
|
|
|
|
|
|
|
|
func stdlibPackageHeading(pkg string, lastLine int) *md.Heading {
|
|
|
|
line := lastLine + 2
|
|
|
|
pos := md.Position{StartLine: line, EndLine: line}
|
|
|
|
return &md.Heading{
|
|
|
|
Position: pos,
|
|
|
|
Level: 4,
|
|
|
|
Text: &md.Text{
|
|
|
|
Position: pos,
|
|
|
|
Inline: []md.Inline{
|
|
|
|
&md.Link{
|
2024-04-08 21:55:49 +03:00
|
|
|
Inner: []md.Inline{&md.Code{Text: pkg}},
|
2024-01-14 16:13:45 +03:00
|
|
|
URL: "/pkg/" + pkg + "/",
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-01-14 00:38:14 +03:00
|
|
|
// removeEmptySections removes headings with no content. A heading has no content
// if there are no blocks between it and the next heading at the same level, or the
// end of the document.
func removeEmptySections(bs []md.Block) []md.Block {
	// res reuses bs's backing array; we only ever append blocks we have
	// already visited, so no unread element is overwritten.
	res := bs[:0]
	delta := 0 // number of lines by which to adjust positions

	// Remove preceding headings at same or higher level; they are empty.
	rem := func(level int) {
		for len(res) > 0 {
			last := res[len(res)-1]
			if lh, ok := last.(*md.Heading); ok && lh.Level >= level {
				res = res[:len(res)-1]
				// Adjust subsequent block positions by the size of this block
				// plus 1 for the blank line between headings.
				delta += lh.EndLine - lh.StartLine + 2
			} else {
				break
			}
		}
	}

	for _, b := range bs {
		if h, ok := b.(*md.Heading); ok {
			// A heading closes any empty sections before it.
			rem(h.Level)
		}
		// Shift b up by the total size of the headings removed so far.
		addLines(b, -delta)
		res = append(res, b)
	}
	// Remove empty headings at the end of the document.
	rem(1)
	return res
}
|
|
|
|
|
2024-01-10 22:40:29 +03:00
|
|
|
// sortedMarkdownFilenames returns the paths of all ".md" files under fsys,
// sorted lexicographically.
func sortedMarkdownFilenames(fsys fs.FS) ([]string, error) {
	var filenames []string
	walk := func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() || !strings.HasSuffix(path, ".md") {
			return nil
		}
		filenames = append(filenames, path)
		return nil
	}
	if err := fs.WalkDir(fsys, ".", walk); err != nil {
		return nil, err
	}
	// '.' comes before '/', which comes before alphanumeric characters.
	// So just sorting the list will put a filename like "net.md" before
	// the directory "net". That is what we want.
	slices.Sort(filenames)
	return filenames, nil
}
|
|
|
|
|
|
|
|
// lastBlock returns the last block in the document.
|
|
|
|
// It panics if the document has no blocks.
|
|
|
|
func lastBlock(doc *md.Document) md.Block {
|
|
|
|
return doc.Blocks[len(doc.Blocks)-1]
|
|
|
|
}
|
|
|
|
|
2024-01-14 00:38:14 +03:00
|
|
|
// addLines adds n lines to the position of b.
|
|
|
|
// n can be negative.
|
2024-01-10 22:40:29 +03:00
|
|
|
func addLines(b md.Block, n int) {
|
|
|
|
pos := position(b)
|
|
|
|
pos.StartLine += n
|
|
|
|
pos.EndLine += n
|
|
|
|
}
|
|
|
|
|
|
|
|
// position returns a pointer to b's Position field so callers can modify
// it in place (md.Block itself only exposes a read-only Pos accessor —
// NOTE(review): confirmed by how Merge copies the result of Pos()).
// It panics on an unrecognized block type.
func position(b md.Block) *md.Position {
	switch b := b.(type) {
	case *md.Heading:
		return &b.Position
	case *md.Text:
		return &b.Position
	case *md.CodeBlock:
		return &b.Position
	case *md.HTMLBlock:
		return &b.Position
	case *md.List:
		return &b.Position
	case *md.Item:
		return &b.Position
	case *md.Empty:
		return &b.Position
	case *md.Paragraph:
		return &b.Position
	case *md.Quote:
		return &b.Position
	case *md.ThematicBreak:
		return &b.Position
	default:
		// Keep this switch in sync with text's; a new block type in
		// rsc.io/markdown needs a case in both.
		panic(fmt.Sprintf("unknown block type %T", b))
	}
}
|
|
|
|
|
2024-01-18 03:33:15 +03:00
|
|
|
func parseMarkdownFile(fsys fs.FS, path string) (*md.Document, error) {
|
2024-01-10 22:40:29 +03:00
|
|
|
f, err := fsys.Open(path)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer f.Close()
|
|
|
|
data, err := io.ReadAll(f)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
in := string(data)
|
|
|
|
doc := NewParser().Parse(in)
|
|
|
|
return doc, nil
|
|
|
|
}
|
2024-01-18 03:33:15 +03:00
|
|
|
|
|
|
|
// An APIFeature is a symbol mentioned in an API file,
// like the ones in the main go repo in the api directory.
// It corresponds to one "pkg ..." line of such a file.
type APIFeature struct {
	Package string // package that the feature is in
	Build   string // build that the symbol is relevant for (e.g. GOOS, GOARCH)
	Feature string // everything about the feature other than the package
	Issue   int    // the issue that introduced the feature, or 0 if none
}
|
|
|
|
|
2024-02-15 17:12:03 +03:00
|
|
|
// apiFileLineRegexp matches one feature line of an api file.
// It has four capturing groups: package, build, feature and issue.
// Group 1 is the package path (up to whitespace), group 2 the optional
// parenthesized build tag, group 3 everything up to '#', and group 4
// the optional "#NNN" issue reference at the end of the line.
var apiFileLineRegexp = regexp.MustCompile(`^pkg ([^ \t]+)[ \t]*(\([^)]+\))?, ([^#]*)(#\d+)?$`)
|
2024-01-18 03:33:15 +03:00
|
|
|
|
|
|
|
// parseAPIFile parses a file in the api format and returns a list of the file's features.
|
|
|
|
// A feature is represented by a single line that looks like
|
|
|
|
//
|
2024-02-15 17:12:03 +03:00
|
|
|
// pkg PKG (BUILD) FEATURE #ISSUE
|
|
|
|
//
|
|
|
|
// where the BUILD and ISSUE may be absent.
|
2024-01-18 03:33:15 +03:00
|
|
|
func parseAPIFile(fsys fs.FS, filename string) ([]APIFeature, error) {
|
|
|
|
f, err := fsys.Open(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer f.Close()
|
|
|
|
var features []APIFeature
|
|
|
|
scan := bufio.NewScanner(f)
|
|
|
|
for scan.Scan() {
|
|
|
|
line := strings.TrimSpace(scan.Text())
|
2024-02-15 17:12:03 +03:00
|
|
|
if line == "" || line[0] == '#' {
|
2024-01-18 03:33:15 +03:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
matches := apiFileLineRegexp.FindStringSubmatch(line)
|
|
|
|
if len(matches) == 0 {
|
|
|
|
return nil, fmt.Errorf("%s: malformed line %q", filename, line)
|
|
|
|
}
|
2024-02-15 17:12:03 +03:00
|
|
|
if len(matches) != 5 {
|
|
|
|
return nil, fmt.Errorf("wrong number of matches for line %q", line)
|
|
|
|
}
|
2024-01-18 03:33:15 +03:00
|
|
|
f := APIFeature{
|
|
|
|
Package: matches[1],
|
2024-02-15 17:12:03 +03:00
|
|
|
Build: matches[2],
|
|
|
|
Feature: strings.TrimSpace(matches[3]),
|
2024-01-18 03:33:15 +03:00
|
|
|
}
|
2024-02-15 17:12:03 +03:00
|
|
|
if issue := matches[4]; issue != "" {
|
2024-01-18 03:33:15 +03:00
|
|
|
var err error
|
2024-02-15 17:12:03 +03:00
|
|
|
f.Issue, err = strconv.Atoi(issue[1:]) // skip leading '#'
|
2024-01-18 03:33:15 +03:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
features = append(features, f)
|
|
|
|
}
|
|
|
|
if scan.Err() != nil {
|
|
|
|
return nil, scan.Err()
|
|
|
|
}
|
|
|
|
return features, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// GroupAPIFeaturesByFile returns a map of the given features keyed by
|
|
|
|
// the doc filename that they are associated with.
|
|
|
|
// A feature with package P and issue N should be documented in the file
|
|
|
|
// "P/N.md".
|
|
|
|
func GroupAPIFeaturesByFile(fs []APIFeature) (map[string][]APIFeature, error) {
|
|
|
|
m := map[string][]APIFeature{}
|
|
|
|
for _, f := range fs {
|
|
|
|
if f.Issue == 0 {
|
|
|
|
return nil, fmt.Errorf("%+v: zero issue", f)
|
|
|
|
}
|
|
|
|
filename := fmt.Sprintf("%s/%d.md", f.Package, f.Issue)
|
|
|
|
m[filename] = append(m[filename], f)
|
|
|
|
}
|
|
|
|
return m, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// CheckAPIFile reads the api file at filename in apiFS, and checks the corresponding
// release-note files under docFS. It checks that the files exist and that they have
// some minimal content (see [CheckFragment]).
// The docRoot argument is the path from the repo or project root to the root of docFS.
// It is used only for error messages.
func CheckAPIFile(apiFS fs.FS, filename string, docFS fs.FS, docRoot string) error {
	features, err := parseAPIFile(apiFS, filename)
	if err != nil {
		return err
	}
	byFile, err := GroupAPIFeaturesByFile(features)
	if err != nil {
		return err
	}
	// Sort the filenames for deterministic error ordering.
	var filenames []string
	for fn := range byFile {
		filenames = append(filenames, fn)
	}
	slices.Sort(filenames)
	mcDir, err := minorChangesDir(docFS)
	if err != nil {
		return err
	}
	var errs []error
	for _, fn := range filenames {
		// Use path.Join for consistency with io/fs pathnames.
		fn = path.Join(mcDir, fn)
		// TODO(jba): check that the file mentions each feature?
		if err := checkFragmentFile(docFS, fn); err != nil {
			errs = append(errs, fmt.Errorf("%s: %v\nSee doc/README.md for more information.", path.Join(docRoot, fn), err))
		}
	}
	// Report all missing/invalid fragment files at once.
	return errors.Join(errs...)
}
|
|
|
|
|
2024-02-01 15:09:31 +03:00
|
|
|
// minorChangesDir returns the unique directory in docFS that corresponds to the
// "Minor changes to the standard library" section of the release notes.
// It is an error if zero or multiple directories match.
func minorChangesDir(docFS fs.FS) (string, error) {
	dirs, err := fs.Glob(docFS, "*stdlib/*minor")
	if err != nil {
		return "", err
	}
	// Exactly one match is the expected case.
	if len(dirs) == 1 {
		return dirs[0], nil
	}
	count := "No"
	if len(dirs) > 1 {
		count = "More than one"
	}
	return "", fmt.Errorf("%s directory matches *stdlib/*minor.\nThis shouldn't happen; please file a bug at https://go.dev/issues/new.",
		count)
}
|
|
|
|
|
2024-01-18 03:33:15 +03:00
|
|
|
func checkFragmentFile(fsys fs.FS, filename string) error {
|
|
|
|
f, err := fsys.Open(filename)
|
|
|
|
if err != nil {
|
|
|
|
if errors.Is(err, fs.ErrNotExist) {
|
2024-01-31 17:38:51 +03:00
|
|
|
err = errors.New("File does not exist. Every API change must have a corresponding release note file.")
|
2024-01-18 03:33:15 +03:00
|
|
|
}
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
defer f.Close()
|
|
|
|
data, err := io.ReadAll(f)
|
|
|
|
return CheckFragment(string(data))
|
|
|
|
}
|