Mirror of https://github.com/go-task/task.git (synced 2025-04-18 12:04:04 +03:00)
feat: decoding improvements (#2068)

* refactor: moved/simplified snippets into its own file with tests
* refactor: move snippet to taskfile package
* feat: support snippets with line/col = 0
* feat: functional options for snippets
* feat: added option to hide snippet indicators
* feat: store raw lines for length calculations
* feat: add debug function for TaskfileDecodeError
* fix: decode errors from commands
* fix: schema for defer cmd calls
* fix: linting issues
* refactor: split var and vars into different files like other structures
parent: fb27318601
commit: cdb6a3f70a
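Illustrative only (not part of this commit): a minimal sketch of how the new pieces below fit together when decoding a Taskfile fails. It mirrors the reader hunk further down; the file path, the padding value, and the top-level wiring are assumptions for the example.

	// snippet_flow_example.go (hypothetical): build a snippet around a decode
	// error and attach it to the error, as the reader now does in this commit.
	package main

	import (
		stderrors "errors"
		"fmt"
		"os"

		"gopkg.in/yaml.v3"

		taskerrors "github.com/go-task/task/v3/errors"
		"github.com/go-task/task/v3/taskfile"
		"github.com/go-task/task/v3/taskfile/ast"
	)

	func main() {
		location := "Taskfile.yml" // assumed path
		b, err := os.ReadFile(location)
		if err != nil {
			panic(err)
		}

		var tf ast.Taskfile
		if err := yaml.Unmarshal(b, &tf); err != nil {
			decodeErr := &taskerrors.TaskfileDecodeError{}
			if stderrors.As(err, &decodeErr) {
				// Highlighted snippet centred on the failing line/column.
				snippet := taskfile.NewSnippet(b,
					taskfile.SnippetWithLine(decodeErr.Line),
					taskfile.SnippetWithColumn(decodeErr.Column),
					taskfile.SnippetWithPadding(2),
				)
				decodeErr = decodeErr.WithFileInfo(location, snippet.String())
				fmt.Println(decodeErr.Error()) // user-facing message plus snippet
				fmt.Println(decodeErr.Debug()) // new: walks the wrapped error chain
				return
			}
			panic(err)
		}
		fmt.Println("taskfile decoded OK")
	}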
@@ -2,36 +2,18 @@ package errors

import (
  "bytes"
  "embed"
  "cmp"
  "errors"
  "fmt"
  "regexp"
  "strings"

  "github.com/alecthomas/chroma/v2"
  "github.com/alecthomas/chroma/v2/quick"
  "github.com/alecthomas/chroma/v2/styles"
  "github.com/fatih/color"
  "gopkg.in/yaml.v3"
)

//go:embed themes/*.xml
var embedded embed.FS

var typeErrorRegex = regexp.MustCompile(`line \d+: (.*)`)

func init() {
  r, err := embedded.Open("themes/task.xml")
  if err != nil {
    panic(err)
  }
  style, err := chroma.NewXMLStyle(r)
  if err != nil {
    panic(err)
  }
  styles.Register(style)
}

type (
  TaskfileDecodeError struct {
    Message string

@@ -39,15 +21,9 @@ type (
    Line int
    Column int
    Tag string
    Snippet TaskfileSnippet
    Snippet string
    Err error
  }
  TaskfileSnippet struct {
    Lines []string
    StartLine int
    EndLine int
    Padding int
  }
)

func NewTaskfileDecodeError(err error, node *yaml.Node) *TaskfileDecodeError {

@@ -88,38 +64,44 @@ func (err *TaskfileDecodeError) Error() string {
    }
  }
  fmt.Fprintln(buf, color.RedString("file: %s:%d:%d", err.Location, err.Line, err.Column))
  fmt.Fprint(buf, err.Snippet)
  return buf.String()
}

  // Print the snippet
  maxLineNumberDigits := digits(err.Snippet.EndLine)
  lineNumberSpacer := strings.Repeat(" ", maxLineNumberDigits)
  columnSpacer := strings.Repeat(" ", err.Column-1)
  for i, line := range err.Snippet.Lines {
    currentLine := err.Snippet.StartLine + i + 1
func (err *TaskfileDecodeError) Debug() string {
  const indentWidth = 2
  buf := &bytes.Buffer{}
  fmt.Fprintln(buf, "TaskfileDecodeError:")

    lineIndicator := " "
    if currentLine == err.Line {
      lineIndicator = ">"
    }
    columnIndicator := "^"
  // Recursively loop through the error chain and print any details
  var debug func(error, int)
  debug = func(err error, indent int) {
    indentStr := strings.Repeat(" ", indent*indentWidth)

    // Print each line
    lineIndicator = color.RedString(lineIndicator)
    columnIndicator = color.RedString(columnIndicator)
    lineNumberFormat := fmt.Sprintf("%%%dd", maxLineNumberDigits)
    lineNumber := fmt.Sprintf(lineNumberFormat, currentLine)
    fmt.Fprintf(buf, "%s %s | %s", lineIndicator, lineNumber, line)

    // Print the column indicator
    if currentLine == err.Line {
      fmt.Fprintf(buf, "\n %s | %s%s", lineNumberSpacer, columnSpacer, columnIndicator)
    // Nothing left to unwrap
    if err == nil {
      fmt.Fprintf(buf, "%sEnd of chain\n", indentStr)
      return
    }

    // If there are more lines to print, add a newline
    if i < len(err.Snippet.Lines)-1 {
      fmt.Fprintln(buf)
    // Taskfile decode error
    decodeErr := &TaskfileDecodeError{}
    if errors.As(err, &decodeErr) {
      fmt.Fprintf(buf, "%s%s (%s:%d:%d)\n",
        indentStr,
        cmp.Or(decodeErr.Message, "<no_message>"),
        decodeErr.Location,
        decodeErr.Line,
        decodeErr.Column,
      )
      debug(errors.Unwrap(err), indent+1)
      return
    }

    fmt.Fprintf(buf, "%s%s\n", indentStr, err)
    debug(errors.Unwrap(err), indent+1)
  }

  debug(err, 0)
  return buf.String()
}

@@ -141,23 +123,9 @@ func (err *TaskfileDecodeError) WithTypeMessage(t string) *TaskfileDecodeError {
  return err
}

func (err *TaskfileDecodeError) WithFileInfo(location string, b []byte, padding int) *TaskfileDecodeError {
  buf := &bytes.Buffer{}
  if err := quick.Highlight(buf, string(b), "yaml", "terminal", "task"); err != nil {
    buf.WriteString(string(b))
  }
  lines := strings.Split(buf.String(), "\n")
  start := max(err.Line-1-padding, 0)
  end := min(err.Line+padding, len(lines)-1)

func (err *TaskfileDecodeError) WithFileInfo(location string, snippet string) *TaskfileDecodeError {
  err.Location = location
  err.Snippet = TaskfileSnippet{
    Lines: lines[start:end],
    StartLine: start,
    EndLine: end,
    Padding: padding,
  }

  err.Snippet = snippet
  return err
}

@@ -168,12 +136,3 @@ func extractTypeErrorMessage(message string) string {
  }
  return message
}

func digits(number int) int {
  count := 0
  for number != 0 {
    number /= 10
    count += 1
  }
  return count
}
@@ -51,18 +51,53 @@ func (c *Cmd) UnmarshalYAML(node *yaml.Node) error {
    return nil

  case yaml.MappingNode:

    // A command with additional options
    var cmdStruct struct {
      Cmd string
      Task string
      For *For
      Silent bool
      Set []string
      Shopt []string
      Vars *Vars
      IgnoreError bool `yaml:"ignore_error"`
      Defer *Defer
      Platforms []*Platform
    }
    if err := node.Decode(&cmdStruct); err == nil && cmdStruct.Cmd != "" {
    if err := node.Decode(&cmdStruct); err != nil {
      return errors.NewTaskfileDecodeError(err, node)
    }
    if cmdStruct.Defer != nil {

      // A deferred command
      if cmdStruct.Defer.Cmd != "" {
        c.Defer = true
        c.Cmd = cmdStruct.Defer.Cmd
        c.Silent = cmdStruct.Silent
        return nil
      }

      // A deferred task call
      if cmdStruct.Defer.Task != "" {
        c.Defer = true
        c.Task = cmdStruct.Defer.Task
        c.Vars = cmdStruct.Defer.Vars
        c.Silent = cmdStruct.Defer.Silent
        return nil
      }
      return nil
    }

    // A task call
    if cmdStruct.Task != "" {
      c.Task = cmdStruct.Task
      c.Vars = cmdStruct.Vars
      c.For = cmdStruct.For
      c.Silent = cmdStruct.Silent
      return nil
    }

    // A command with additional options
    if cmdStruct.Cmd != "" {
      c.Cmd = cmdStruct.Cmd
      c.For = cmdStruct.For
      c.Silent = cmdStruct.Silent

@@ -73,45 +108,6 @@ func (c *Cmd) UnmarshalYAML(node *yaml.Node) error {
      return nil
    }

    // A deferred command
    var deferredCmd struct {
      Defer string
      Silent bool
    }
    if err := node.Decode(&deferredCmd); err == nil && deferredCmd.Defer != "" {
      c.Defer = true
      c.Cmd = deferredCmd.Defer
      c.Silent = deferredCmd.Silent
      return nil
    }

    // A deferred task call
    var deferredCall struct {
      Defer Call
    }
    if err := node.Decode(&deferredCall); err == nil && deferredCall.Defer.Task != "" {
      c.Defer = true
      c.Task = deferredCall.Defer.Task
      c.Vars = deferredCall.Defer.Vars
      c.Silent = deferredCall.Defer.Silent
      return nil
    }

    // A task call
    var taskCall struct {
      Task string
      Vars *Vars
      For *For
      Silent bool
    }
    if err := node.Decode(&taskCall); err == nil && taskCall.Task != "" {
      c.Task = taskCall.Task
      c.Vars = taskCall.Vars
      c.For = taskCall.For
      c.Silent = taskCall.Silent
      return nil
    }

    return errors.NewTaskfileDecodeError(nil, node).WithMessage("invalid keys in command")
}
taskfile/ast/defer.go (new file, 45 lines)
@@ -0,0 +1,45 @@
package ast

import (
  "gopkg.in/yaml.v3"

  "github.com/go-task/task/v3/errors"
)

type Defer struct {
  Cmd string
  Task string
  Vars *Vars
  Silent bool
}

func (d *Defer) UnmarshalYAML(node *yaml.Node) error {
  switch node.Kind {

  case yaml.ScalarNode:
    var cmd string
    if err := node.Decode(&cmd); err != nil {
      return errors.NewTaskfileDecodeError(err, node)
    }
    d.Cmd = cmd
    return nil

  case yaml.MappingNode:
    var deferStruct struct {
      Defer string
      Task string
      Vars *Vars
      Silent bool
    }
    if err := node.Decode(&deferStruct); err != nil {
      return errors.NewTaskfileDecodeError(err, node)
    }
    d.Cmd = deferStruct.Defer
    d.Task = deferStruct.Task
    d.Vars = deferStruct.Vars
    d.Silent = deferStruct.Silent
    return nil
  }

  return errors.NewTaskfileDecodeError(nil, node).WithTypeMessage("defer")
}
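Illustrative only (not part of the diff): a short sketch of how the new Defer type above, together with the Cmd decoding hunk before it, handles the two defer forms. The YAML literals here are made up for the example.

	// defer_decode_example.go (hypothetical): decode a deferred command and a
	// deferred task call into ast.Cmd via its UnmarshalYAML shown above.
	package main

	import (
		"fmt"

		"gopkg.in/yaml.v3"

		"github.com/go-task/task/v3/taskfile/ast"
	)

	func main() {
		// Deferred command: the value of "defer" is a plain string.
		var cmdDefer ast.Cmd
		if err := yaml.Unmarshal([]byte(`defer: echo "cleaning up"`), &cmdDefer); err != nil {
			panic(err)
		}
		fmt.Println(cmdDefer.Defer, cmdDefer.Cmd) // true echo "cleaning up"

		// Deferred task call: the value of "defer" is a mapping with task/vars.
		src := `
	defer:
	  task: cleanup
	  vars:
	    DIR: /tmp/build
	`
		var taskDefer ast.Cmd
		if err := yaml.Unmarshal([]byte(src), &taskDefer); err != nil {
			panic(err)
		}
		fmt.Println(taskDefer.Defer, taskDefer.Task) // true cleanup
	}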
@@ -2,169 +2,13 @@ package ast

import (
  "strings"
  "sync"

  "github.com/elliotchance/orderedmap/v2"
  "gopkg.in/yaml.v3"

  "github.com/go-task/task/v3/errors"
  "github.com/go-task/task/v3/internal/deepcopy"
  "github.com/go-task/task/v3/internal/experiments"
)

type (
  // Vars is an ordered map of variable names to values.
  Vars struct {
    om *orderedmap.OrderedMap[string, Var]
    mutex sync.RWMutex
  }
  // A VarElement is a key-value pair that is used for initializing a Vars
  // structure.
  VarElement orderedmap.Element[string, Var]
)

// NewVars creates a new instance of Vars and initializes it with the provided
// set of elements, if any. The elements are added in the order they are passed.
func NewVars(els ...*VarElement) *Vars {
  vars := &Vars{
    om: orderedmap.NewOrderedMap[string, Var](),
  }
  for _, el := range els {
    vars.Set(el.Key, el.Value)
  }
  return vars
}

// Len returns the number of variables in the Vars map.
func (vars *Vars) Len() int {
  if vars == nil || vars.om == nil {
    return 0
  }
  defer vars.mutex.RUnlock()
  vars.mutex.RLock()
  return vars.om.Len()
}

// Get returns the value of the variable with the provided key and a boolean
// that indicates if the value was found or not. If the value is not found, the
// returned variable is a zero value and the bool is false.
func (vars *Vars) Get(key string) (Var, bool) {
  if vars == nil || vars.om == nil {
    return Var{}, false
  }
  defer vars.mutex.RUnlock()
  vars.mutex.RLock()
  return vars.om.Get(key)
}

// Set sets the value of the variable with the provided key to the provided
// value. If the variable already exists, its value is updated. If the variable
// does not exist, it is created.
func (vars *Vars) Set(key string, value Var) bool {
  if vars == nil {
    vars = NewVars()
  }
  if vars.om == nil {
    vars.om = orderedmap.NewOrderedMap[string, Var]()
  }
  defer vars.mutex.Unlock()
  vars.mutex.Lock()
  return vars.om.Set(key, value)
}

// Range calls the provided function for each variable in the map. The function
// receives the variable's key and value as arguments. If the function returns
// an error, the iteration stops and the error is returned.
func (vars *Vars) Range(f func(k string, v Var) error) error {
  if vars == nil || vars.om == nil {
    return nil
  }
  for pair := vars.om.Front(); pair != nil; pair = pair.Next() {
    if err := f(pair.Key, pair.Value); err != nil {
      return err
    }
  }
  return nil
}

// ToCacheMap converts Vars to an unordered map containing only the static
// variables
func (vars *Vars) ToCacheMap() (m map[string]any) {
  defer vars.mutex.RUnlock()
  vars.mutex.RLock()
  m = make(map[string]any, vars.Len())
  for pair := vars.om.Front(); pair != nil; pair = pair.Next() {
    if pair.Value.Sh != nil && *pair.Value.Sh != "" {
      // Dynamic variable is not yet resolved; trigger
      // <no value> to be used in templates.
      return nil
    }
    if pair.Value.Live != nil {
      m[pair.Key] = pair.Value.Live
    } else {
      m[pair.Key] = pair.Value.Value
    }
  }
  return
}

// Merge loops over other and merges its values with the variables in vars. If
// the include parameter is not nil and it is an advanced import, the
// directory is set to the value of the include parameter.
func (vars *Vars) Merge(other *Vars, include *Include) {
  if vars == nil || vars.om == nil || other == nil {
    return
  }
  defer other.mutex.RUnlock()
  other.mutex.RLock()
  for pair := other.om.Front(); pair != nil; pair = pair.Next() {
    if include != nil && include.AdvancedImport {
      pair.Value.Dir = include.Dir
    }
    vars.om.Set(pair.Key, pair.Value)
  }
}

func (vs *Vars) DeepCopy() *Vars {
  if vs == nil {
    return nil
  }
  defer vs.mutex.RUnlock()
  vs.mutex.RLock()
  return &Vars{
    om: deepcopy.OrderedMap(vs.om),
  }
}

func (vs *Vars) UnmarshalYAML(node *yaml.Node) error {
  if vs == nil || vs.om == nil {
    *vs = *NewVars()
  }
  vs.om = orderedmap.NewOrderedMap[string, Var]()
  switch node.Kind {
  case yaml.MappingNode:
    // NOTE: orderedmap does not have an unmarshaler, so we have to decode
    // the map manually. We increment over 2 values at a time and assign
    // them as a key-value pair.
    for i := 0; i < len(node.Content); i += 2 {
      keyNode := node.Content[i]
      valueNode := node.Content[i+1]

      // Decode the value node into a Task struct
      var v Var
      if err := valueNode.Decode(&v); err != nil {
        return errors.NewTaskfileDecodeError(err, node)
      }

      // Add the task to the ordered map
      vs.Set(keyNode.Value, v)
    }
    return nil
  }

  return errors.NewTaskfileDecodeError(nil, node).WithTypeMessage("vars")
}

// Var represents either a static or dynamic variable.
type Var struct {
  Value any
taskfile/ast/vars.go (new file, 164 lines)
@@ -0,0 +1,164 @@
package ast

import (
  "sync"

  "github.com/elliotchance/orderedmap/v2"
  "gopkg.in/yaml.v3"

  "github.com/go-task/task/v3/errors"
  "github.com/go-task/task/v3/internal/deepcopy"
)

type (
  // Vars is an ordered map of variable names to values.
  Vars struct {
    om *orderedmap.OrderedMap[string, Var]
    mutex sync.RWMutex
  }
  // A VarElement is a key-value pair that is used for initializing a Vars
  // structure.
  VarElement orderedmap.Element[string, Var]
)

// NewVars creates a new instance of Vars and initializes it with the provided
// set of elements, if any. The elements are added in the order they are passed.
func NewVars(els ...*VarElement) *Vars {
  vars := &Vars{
    om: orderedmap.NewOrderedMap[string, Var](),
  }
  for _, el := range els {
    vars.Set(el.Key, el.Value)
  }
  return vars
}

// Len returns the number of variables in the Vars map.
func (vars *Vars) Len() int {
  if vars == nil || vars.om == nil {
    return 0
  }
  defer vars.mutex.RUnlock()
  vars.mutex.RLock()
  return vars.om.Len()
}

// Get returns the value of the variable with the provided key and a boolean
// that indicates if the value was found or not. If the value is not found, the
// returned variable is a zero value and the bool is false.
func (vars *Vars) Get(key string) (Var, bool) {
  if vars == nil || vars.om == nil {
    return Var{}, false
  }
  defer vars.mutex.RUnlock()
  vars.mutex.RLock()
  return vars.om.Get(key)
}

// Set sets the value of the variable with the provided key to the provided
// value. If the variable already exists, its value is updated. If the variable
// does not exist, it is created.
func (vars *Vars) Set(key string, value Var) bool {
  if vars == nil {
    vars = NewVars()
  }
  if vars.om == nil {
    vars.om = orderedmap.NewOrderedMap[string, Var]()
  }
  defer vars.mutex.Unlock()
  vars.mutex.Lock()
  return vars.om.Set(key, value)
}

// Range calls the provided function for each variable in the map. The function
// receives the variable's key and value as arguments. If the function returns
// an error, the iteration stops and the error is returned.
func (vars *Vars) Range(f func(k string, v Var) error) error {
  if vars == nil || vars.om == nil {
    return nil
  }
  for pair := vars.om.Front(); pair != nil; pair = pair.Next() {
    if err := f(pair.Key, pair.Value); err != nil {
      return err
    }
  }
  return nil
}

// ToCacheMap converts Vars to an unordered map containing only the static
// variables
func (vars *Vars) ToCacheMap() (m map[string]any) {
  defer vars.mutex.RUnlock()
  vars.mutex.RLock()
  m = make(map[string]any, vars.Len())
  for pair := vars.om.Front(); pair != nil; pair = pair.Next() {
    if pair.Value.Sh != nil && *pair.Value.Sh != "" {
      // Dynamic variable is not yet resolved; trigger
      // <no value> to be used in templates.
      return nil
    }
    if pair.Value.Live != nil {
      m[pair.Key] = pair.Value.Live
    } else {
      m[pair.Key] = pair.Value.Value
    }
  }
  return
}

// Merge loops over other and merges its values with the variables in vars. If
// the include parameter is not nil and it is an advanced import, the
// directory is set to the value of the include parameter.
func (vars *Vars) Merge(other *Vars, include *Include) {
  if vars == nil || vars.om == nil || other == nil {
    return
  }
  defer other.mutex.RUnlock()
  other.mutex.RLock()
  for pair := other.om.Front(); pair != nil; pair = pair.Next() {
    if include != nil && include.AdvancedImport {
      pair.Value.Dir = include.Dir
    }
    vars.om.Set(pair.Key, pair.Value)
  }
}

func (vs *Vars) DeepCopy() *Vars {
  if vs == nil {
    return nil
  }
  defer vs.mutex.RUnlock()
  vs.mutex.RLock()
  return &Vars{
    om: deepcopy.OrderedMap(vs.om),
  }
}

func (vs *Vars) UnmarshalYAML(node *yaml.Node) error {
  if vs == nil || vs.om == nil {
    *vs = *NewVars()
  }
  vs.om = orderedmap.NewOrderedMap[string, Var]()
  switch node.Kind {
  case yaml.MappingNode:
    // NOTE: orderedmap does not have an unmarshaler, so we have to decode
    // the map manually. We increment over 2 values at a time and assign
    // them as a key-value pair.
    for i := 0; i < len(node.Content); i += 2 {
      keyNode := node.Content[i]
      valueNode := node.Content[i+1]

      // Decode the value node into a Task struct
      var v Var
      if err := valueNode.Decode(&v); err != nil {
        return errors.NewTaskfileDecodeError(err, node)
      }

      // Add the task to the ordered map
      vs.Set(keyNode.Value, v)
    }
    return nil
  }

  return errors.NewTaskfileDecodeError(nil, node).WithTypeMessage("vars")
}
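Illustrative only (not part of the diff): a short sketch of the Vars API in the new file above, using the constructor and element type it defines. The variable names and values are made up for the example.

	// vars_example.go (hypothetical): ordered variables with NewVars/Set/Get/Range.
	package main

	import (
		"fmt"

		"github.com/go-task/task/v3/taskfile/ast"
	)

	func main() {
		// Build an ordered set of variables; insertion order is preserved.
		vars := ast.NewVars(
			&ast.VarElement{Key: "FOO", Value: ast.Var{Value: "foo"}},
			&ast.VarElement{Key: "BAR", Value: ast.Var{Value: "bar"}},
		)
		vars.Set("BAZ", ast.Var{Value: 42})

		if v, ok := vars.Get("FOO"); ok {
			fmt.Println("FOO =", v.Value)
		}

		// Range visits variables in insertion order: FOO, BAR, BAZ.
		_ = vars.Range(func(k string, v ast.Var) error {
			fmt.Println(k, "=", v.Value)
			return nil
		})
	}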
@@ -193,9 +193,14 @@ func (r *Reader) readNode(node Node) (*ast.Taskfile, error) {
  var tf ast.Taskfile
  if err := yaml.Unmarshal(b, &tf); err != nil {
    // Decode the taskfile and add the file info to any errors
    taskfileInvalidErr := &errors.TaskfileDecodeError{}
    if errors.As(err, &taskfileInvalidErr) {
      return nil, taskfileInvalidErr.WithFileInfo(node.Location(), b, 2)
    taskfileDecodeErr := &errors.TaskfileDecodeError{}
    if errors.As(err, &taskfileDecodeErr) {
      snippet := NewSnippet(b,
        SnippetWithLine(taskfileDecodeErr.Line),
        SnippetWithColumn(taskfileDecodeErr.Column),
        SnippetWithPadding(2),
      )
      return nil, taskfileDecodeErr.WithFileInfo(node.Location(), snippet.String())
    }
    return nil, &errors.TaskfileInvalidError{URI: filepathext.TryAbsToRel(node.Location()), Err: err}
  }
taskfile/snippet.go (new file, 148 lines)
@@ -0,0 +1,148 @@
package taskfile

import (
  "bytes"
  "embed"
  "fmt"
  "strings"

  "github.com/alecthomas/chroma/v2"
  "github.com/alecthomas/chroma/v2/quick"
  "github.com/alecthomas/chroma/v2/styles"
  "github.com/fatih/color"
)

//go:embed themes/*.xml
var embedded embed.FS

const (
  lineIndicator = ">"
  columnIndicator = "^"
)

func init() {
  r, err := embedded.Open("themes/task.xml")
  if err != nil {
    panic(err)
  }
  style, err := chroma.NewXMLStyle(r)
  if err != nil {
    panic(err)
  }
  styles.Register(style)
}

type (
  SnippetOption func(*Snippet)
  Snippet struct {
    linesRaw []string
    linesHighlighted []string
    start int
    end int
    line int
    column int
    padding int
    noIndicators bool
  }
)

// NewSnippet creates a new snippet from a byte slice and a line and column
// number. The line and column numbers should be 1-indexed. For example, the
// first character in the file would be 1:1 (line 1, column 1). The padding
// determines the number of lines to include before and after the chosen line.
func NewSnippet(b []byte, opts ...SnippetOption) *Snippet {
  snippet := &Snippet{}
  for _, opt := range opts {
    opt(snippet)
  }

  // Syntax highlight the input and split it into lines
  buf := &bytes.Buffer{}
  if err := quick.Highlight(buf, string(b), "yaml", "terminal", "task"); err != nil {
    buf.WriteString(string(b))
  }
  linesRaw := strings.Split(string(b), "\n")
  linesHighlighted := strings.Split(buf.String(), "\n")

  // Work out the start and end lines of the snippet
  snippet.start = max(snippet.line-snippet.padding, 1)
  snippet.end = min(snippet.line+snippet.padding, len(linesRaw)-1)
  snippet.linesRaw = linesRaw[snippet.start-1 : snippet.end]
  snippet.linesHighlighted = linesHighlighted[snippet.start-1 : snippet.end]

  return snippet
}

func SnippetWithLine(line int) SnippetOption {
  return func(snippet *Snippet) {
    snippet.line = line
  }
}

func SnippetWithColumn(column int) SnippetOption {
  return func(snippet *Snippet) {
    snippet.column = column
  }
}

func SnippetWithPadding(padding int) SnippetOption {
  return func(snippet *Snippet) {
    snippet.padding = padding
  }
}

func SnippetWithNoIndicators() SnippetOption {
  return func(snippet *Snippet) {
    snippet.noIndicators = true
  }
}

func (snippet *Snippet) String() string {
  buf := &bytes.Buffer{}

  maxLineNumberDigits := digits(snippet.end)
  lineNumberFormat := fmt.Sprintf("%%%dd", maxLineNumberDigits)
  lineNumberSpacer := strings.Repeat(" ", maxLineNumberDigits)
  lineIndicatorSpacer := strings.Repeat(" ", len(lineIndicator))
  columnSpacer := strings.Repeat(" ", max(snippet.column-1, 0))

  // Loop over each line in the snippet
  for i, lineHighlighted := range snippet.linesHighlighted {
    if i > 0 {
      fmt.Fprintln(buf)
    }

    currentLine := snippet.start + i
    lineNumber := fmt.Sprintf(lineNumberFormat, currentLine)

    // If this is a padding line or indicators are disabled, print it as normal
    if currentLine != snippet.line || snippet.noIndicators {
      fmt.Fprintf(buf, "%s %s | %s", lineIndicatorSpacer, lineNumber, lineHighlighted)
      continue
    }

    // Otherwise, print the line with indicators
    fmt.Fprintf(buf, "%s %s | %s", color.RedString(lineIndicator), lineNumber, lineHighlighted)

    // Only print the column indicator if the column is in bounds
    if snippet.column > 0 && snippet.column <= len(snippet.linesRaw[i]) {
      fmt.Fprintf(buf, "\n%s %s | %s%s", lineIndicatorSpacer, lineNumberSpacer, columnSpacer, color.RedString(columnIndicator))
    }
  }

  // If there are lines, but no line is selected, print the column indicator under all the lines
  if len(snippet.linesHighlighted) > 0 && snippet.line == 0 && snippet.column > 0 {
    fmt.Fprintf(buf, "\n%s %s | %s%s", lineIndicatorSpacer, lineNumberSpacer, columnSpacer, color.RedString(columnIndicator))
  }

  return buf.String()
}

func digits(number int) int {
  count := 0
  for number != 0 {
    number /= 10
    count += 1
  }
  return count
}
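Illustrative only (not part of the diff): a brief sketch of building and printing a snippet with the functional options defined in the file above. The Taskfile content is made up, and the exact output depends on the terminal colour codes added by chroma.

	// snippet_options_example.go (hypothetical): render a snippet with and
	// without the ">" line marker and "^" column marker.
	package main

	import (
		"fmt"

		"github.com/go-task/task/v3/taskfile"
	)

	func main() {
		src := []byte("version: 3\n\ntasks:\n  default:\n    cmds:\n      - echo hello\n")

		// A snippet centred on line 4, column 3, with one line of context either
		// side. Lines are rendered as "<indicator> <number> | <content>" and a
		// "^" is printed under the selected column.
		snippet := taskfile.NewSnippet(src,
			taskfile.SnippetWithLine(4),
			taskfile.SnippetWithColumn(3),
			taskfile.SnippetWithPadding(1),
		)
		fmt.Println(snippet.String())

		// The same snippet without the ">" and "^" markers.
		plain := taskfile.NewSnippet(src,
			taskfile.SnippetWithLine(4),
			taskfile.SnippetWithPadding(1),
			taskfile.SnippetWithNoIndicators(),
		)
		fmt.Println(plain.String())
	}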
taskfile/snippet_test.go (new file, 289 lines)
@@ -0,0 +1,289 @@
package taskfile

import (
"strings"
"testing"

"github.com/stretchr/testify/require"
)

const sample = `version: 3

tasks:
  default:
    vars:
      FOO: foo
      BAR: bar
    cmds:
      - echo "{{.FOO}}"
      - echo "{{.BAR}}"
`

func TestNewSnippet(t *testing.T) {
t.Parallel()
tests := []struct {
name string
b []byte
opts []SnippetOption
want *Snippet
}{
{
name: "first line, first column",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(1),
SnippetWithColumn(1),
},
want: &Snippet{
linesRaw: []string{
"version: 3",
},
linesHighlighted: []string{
"\x1b[33mversion\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36m3\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
},
start: 1,
end: 1,
line: 1,
column: 1,
padding: 0,
},
},
{
name: "first line, first column, padding=2",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(1),
SnippetWithColumn(1),
SnippetWithPadding(2),
},
want: &Snippet{
linesRaw: []string{
"version: 3",
"",
"tasks:",
},
linesHighlighted: []string{
"\x1b[33mversion\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36m3\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
"\x1b[1m\x1b[30m\x1b[0m",
"\x1b[33mtasks\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
},
start: 1,
end: 3,
line: 1,
column: 1,
padding: 2,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got := NewSnippet(tt.b, tt.opts...)
require.Equal(t, tt.want, got)
})
}
}

func TestSnippetString(t *testing.T) {
t.Parallel()
tests := []struct {
name string
b []byte
opts []SnippetOption
want string
}{
{
name: "empty",
b: []byte{},
opts: []SnippetOption{
SnippetWithLine(1),
SnippetWithColumn(1),
},
want: "",
},
{
name: "0th line, 0th column (no indicators)",
b: []byte(sample),
want: "",
},
{
name: "1st line, 0th column (line indicator only)",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(1),
},
want: "> 1 | \x1b[33mversion\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36m3\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
},
{
name: "0th line, 1st column (column indicator only)",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithColumn(1),
},
want: "",
},
{
name: "0th line, 1st column, padding=2 (column indicator only)",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithColumn(1),
SnippetWithPadding(2),
},
want: " 1 | \x1b[33mversion\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36m3\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 2 | \x1b[1m\x1b[30m\x1b[0m\n | ^",
},
{
name: "1st line, 1st column",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(1),
SnippetWithColumn(1),
},
want: "> 1 | \x1b[33mversion\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36m3\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^",
},
{
name: "1st line, 10th column",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(1),
SnippetWithColumn(10),
},
want: "> 1 | \x1b[33mversion\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36m3\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^",
},
{
name: "1st line, 1st column, padding=2",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(1),
SnippetWithColumn(1),
SnippetWithPadding(2),
},
want: "> 1 | \x1b[33mversion\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36m3\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^\n 2 | \x1b[1m\x1b[30m\x1b[0m\n 3 | \x1b[33mtasks\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
},
{
name: "1st line, 10th column, padding=2",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(1),
SnippetWithColumn(10),
SnippetWithPadding(2),
},
want: "> 1 | \x1b[33mversion\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36m3\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^\n 2 | \x1b[1m\x1b[30m\x1b[0m\n 3 | \x1b[33mtasks\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
},
{
name: "5th line, 1st column",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(5),
SnippetWithColumn(1),
},
want: "> 5 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mvars\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^",
},
{
name: "5th line, 5th column",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(5),
SnippetWithColumn(5),
},
want: "> 5 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mvars\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^",
},
{
name: "5th line, 5th column, padding=2",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(5),
SnippetWithColumn(5),
SnippetWithPadding(2),
},
want: " 3 | \x1b[33mtasks\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 4 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mdefault\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n> 5 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mvars\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^\n 6 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mFOO\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36mfoo\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 7 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mBAR\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36mbar\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
},
{
name: "5th line, 5th column, padding=2, no indicators",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(5),
SnippetWithColumn(5),
SnippetWithPadding(2),
SnippetWithNoIndicators(),
},
want: " 3 | \x1b[33mtasks\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 4 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mdefault\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 5 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mvars\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 6 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mFOO\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36mfoo\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 7 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mBAR\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36mbar\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
},
{
name: "10th line, 1st column",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(10),
SnippetWithColumn(1),
},
want: "> 10 | \x1b[1m\x1b[30m \x1b[0m\x1b[1m\x1b[30m- \x1b[0m\x1b[36mecho \"{{.BAR}}\"\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^",
},
{
name: "10th line, 23rd column",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(10),
SnippetWithColumn(23),
},
want: "> 10 | \x1b[1m\x1b[30m \x1b[0m\x1b[1m\x1b[30m- \x1b[0m\x1b[36mecho \"{{.BAR}}\"\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^",
},
{
name: "10th line, 24th column (out of bounds)",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(10),
SnippetWithColumn(24),
},
want: "> 10 | \x1b[1m\x1b[30m \x1b[0m\x1b[1m\x1b[30m- \x1b[0m\x1b[36mecho \"{{.BAR}}\"\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
},
{
name: "10th line, 23rd column, padding=2",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(10),
SnippetWithColumn(23),
SnippetWithPadding(2),
},
want: " 8 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mcmds\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 9 | \x1b[1m\x1b[30m \x1b[0m\x1b[1m\x1b[30m- \x1b[0m\x1b[36mecho \"{{.FOO}}\"\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n> 10 | \x1b[1m\x1b[30m \x1b[0m\x1b[1m\x1b[30m- \x1b[0m\x1b[36mecho \"{{.BAR}}\"\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^",
},
{
name: "5th line, 5th column, padding=100",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(5),
SnippetWithColumn(5),
SnippetWithPadding(100),
},
want: " 1 | \x1b[33mversion\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36m3\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 2 | \x1b[1m\x1b[30m\x1b[0m\n 3 | \x1b[33mtasks\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 4 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mdefault\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n> 5 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mvars\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n | ^\n 6 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mFOO\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36mfoo\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 7 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mBAR\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m \x1b[0m\x1b[36mbar\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 8 | \x1b[1m\x1b[30m \x1b[0m\x1b[33mcmds\x1b[0m\x1b[1m\x1b[30m:\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 9 | \x1b[1m\x1b[30m \x1b[0m\x1b[1m\x1b[30m- \x1b[0m\x1b[36mecho \"{{.FOO}}\"\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 10 | \x1b[1m\x1b[30m \x1b[0m\x1b[1m\x1b[30m- \x1b[0m\x1b[36mecho \"{{.BAR}}\"\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
},
{
name: "11th line (out of bounds), 1st column",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(11),
SnippetWithColumn(1),
},
want: "",
},
{
name: "11th line (out of bounds), 1st column, padding=2",
b: []byte(sample),
opts: []SnippetOption{
SnippetWithLine(11),
SnippetWithColumn(1),
SnippetWithPadding(2),
},
want: " 9 | \x1b[1m\x1b[30m \x1b[0m\x1b[1m\x1b[30m- \x1b[0m\x1b[36mecho \"{{.FOO}}\"\x1b[0m\x1b[1m\x1b[30m\x1b[0m\n 10 | \x1b[1m\x1b[30m \x1b[0m\x1b[1m\x1b[30m- \x1b[0m\x1b[36mecho \"{{.BAR}}\"\x1b[0m\x1b[1m\x1b[30m\x1b[0m",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
snippet := NewSnippet(tt.b, tt.opts...)
got := snippet.String()
if strings.Contains(got, "\t") {
t.Fatalf("tab character found in snippet - check the sample string")
}
require.Equal(t, tt.want, got)
})
}
}
@@ -29,6 +29,12 @@
},
{
"$ref": "#/definitions/task_call"
},
{
"$ref": "#/definitions/defer_task_call"
},
{
"$ref": "#/definitions/defer_cmd_call"
}
]
}

@@ -216,7 +222,10 @@
"$ref": "#/definitions/task_call"
},
{
"$ref": "#/definitions/defer_call"
"$ref": "#/definitions/defer_task_call"
},
{
"$ref": "#/definitions/defer_cmd_call"
},
{
"$ref": "#/definitions/for_cmds_call"

@@ -350,15 +359,12 @@
"additionalProperties": false,
"required": ["cmd"]
},
"defer_call": {
"defer_task_call": {
"type": "object",
"properties": {
"defer": {
"description": "Run a command when the task completes. This command will run even when the task fails",
"anyOf": [
{
"type": "string"
},
{
"$ref": "#/definitions/task_call"
}

@@ -368,6 +374,21 @@
"additionalProperties": false,
"required": ["defer"]
},
"defer_cmd_call": {
"type": "object",
"properties": {
"defer": {
"description": "Name of the command to defer",
"type": "string"
},
"silent": {
"description": "Hides task name and command from output. The command's output will still be redirected to `STDOUT` and `STDERR`.",
"type": "boolean"
}
},
"additionalProperties": false,
"required": ["defer"]
},
"for_cmds_call": {
"type": "object",
"properties": {