diff --git a/config.go b/config.go index d3882dd..2126af0 100644 --- a/config.go +++ b/config.go @@ -28,13 +28,13 @@ import ( "strconv" "strings" "time" + + "github.com/jakobmoellerdev/lvm2go/config" + "github.com/jakobmoellerdev/lvm2go/util" ) var ErrProfileNameEmpty = errors.New("profile name is empty") -const LVMConfigStructTag = "lvm" -const LVMProfileExtension = ".profile" - type ( ConfigOptions struct { ConfigType @@ -141,7 +141,7 @@ func (c *client) ReadAndDecodeConfig(ctx context.Context, v any, opts ...ConfigO } func (c *client) WriteAndEncodeConfig(_ context.Context, v any, writer io.Writer) error { - return NewLexingConfigEncoder(writer).Encode(v) + return config.NewLexingEncoder(writer).Encode(v) } func (c *client) GetProfileDirectory(ctx context.Context) (string, error) { @@ -207,7 +207,7 @@ func (c *client) GetProfilePath(ctx context.Context, profile Profile) (string, e } if ext := filepath.Ext(path); ext == "" { - path = fmt.Sprintf("%s%s", path, LVMProfileExtension) + path = fmt.Sprintf("%s%s", path, config.LVMProfileExtension) } else if ext != ".profile" { return "", fmt.Errorf("%q is an invalid profile extension: %w", ext, ErrInvalidProfileExtension) } @@ -254,11 +254,11 @@ func (c *client) UpdateConfigFromPath(ctx context.Context, v any, path string) e // If the resulting configuration is smaller than the original, the difference is padded with empty bytes. // The configuration file is written back to the start of original configuration file. func updateConfig(ctx context.Context, v any, rw io.ReadWriteSeeker) error { - structMappings, err := DecodeLVMStructTagFieldMappings(v) + structMappings, err := config.DecodeFieldMappings(v) if err != nil { return fmt.Errorf("failed to read lvm struct tag: %v", err) } - tokensToModify := StructMappingsToConfigTokens(structMappings) + tokensToModify := config.FieldMappingsToTokens(structMappings) data, err := io.ReadAll(rw) if err != nil { @@ -266,21 +266,21 @@ func updateConfig(ctx context.Context, v any, rw io.ReadWriteSeeker) error { } reader := bytes.NewReader(data) - tokensFromFile, err := NewBufferedConfigLexer(reader).Lex() + tokensFromFile, err := config.NewBufferedLexer(reader).Lex() if err != nil { return fmt.Errorf("failed to read configuration: %v", err) } // First merge all assignments from the new struct into the existing configuration - newTokens := assignmentsWithSections(tokensFromFile). - overrideWith(assignmentsWithSections(tokensToModify)) + newTokens := config.AssignmentsWithSections(tokensFromFile). + OverrideWith(config.AssignmentsWithSections(tokensToModify)) // Then append any new assignments at the end of the sections - tokens := appendAssignmentsAtEndOfSections(tokensFromFile, newTokens) + tokens := config.AppendAssignmentsAtEndOfSections(tokensFromFile, newTokens) // Write the new configuration to a buffer buf := bytes.NewBuffer(make([]byte, 0, len(data))) - if err := NewLexingConfigEncoder(buf).Encode(tokens); err != nil { + if err := config.NewLexingEncoder(buf).Encode(tokens); err != nil { return fmt.Errorf("failed to encode new configuration: %v", err) } @@ -298,14 +298,8 @@ func updateConfig(ctx context.Context, v any, rw io.ReadWriteSeeker) error { return copyWithTimeout(ctx, rw, buf, 10*time.Second) } -// generateLVMConfigEditComment generates a comment to be added to the configuration file -// This comment is used to indicate that the field was edited by the client. 
-func generateLVMConfigEditComment() string { - return fmt.Sprintf(`This field was edited by %s at %s`, ModuleID(), time.Now().Format(time.RFC3339)) -} - func generateLVMConfigCreateComment() string { - return fmt.Sprintf(`configuration created by %s at %s`, ModuleID(), time.Now().Format(time.RFC3339)) + return fmt.Sprintf(`configuration created by %s at %s`, util.ModuleID(), time.Now().Format(time.RFC3339)) } // GetFromRawConfig retrieves a value from a RawConfig by key and attempts to cast it to the type of T. @@ -357,18 +351,18 @@ func (opts *ConfigOptions) ApplyToArgs(args Arguments) error { } func getStructProcessorAndQuery(v any) (RawOutputProcessor, []string, error) { - fieldsForConfigQuery, err := DecodeLVMStructTagFieldMappings(v) + fieldsForConfigQuery, err := config.DecodeFieldMappings(v) if err != nil { return nil, nil, fmt.Errorf("failed to read lvm struct tag: %v", err) } var query []string for _, field := range fieldsForConfigQuery { - query = append(query, fmt.Sprintf("%s/%s", field.prefix, field.name)) + query = append(query, fmt.Sprintf("%s/%s", field.Prefix, field.Name)) } return func(out io.Reader) error { - return newLexingConfigDecoderWithFieldMapping(out, fieldsForConfigQuery).Decode() + return config.NewLexingConfigDecoderWithFieldMapping(out, fieldsForConfigQuery).Decode() }, query, nil } diff --git a/config/ast.go b/config/ast.go new file mode 100644 index 0000000..fc9f0d7 --- /dev/null +++ b/config/ast.go @@ -0,0 +1,288 @@ +package config + +import ( + "bufio" + "fmt" + "reflect" + "strings" +) + +type AST interface { + Statements() []ASTStatement + + Parse(t Tokens) + Tokens() Tokens +} + +type ModifiableAST interface { + AST + Append(ASTStatement) +} + +type ast struct { + statements []ASTStatement +} + +func (a *ast) Append(statement ASTStatement) { + a.statements = append(a.statements, statement) +} + +func (a *ast) Statements() []ASTStatement { + return a.statements +} + +type ASTStatement interface { + Tokens() Tokens +} + +type Newline interface { + ASTStatement +} + +type newline struct { + *Token +} + +func (n *newline) Tokens() Tokens { + return Tokens{n.Token} +} + +type Section interface { + ASTStatement + Name() string + Children() []ASTStatement + + Append(ASTStatement) +} + +type Comment interface { + ASTStatement + Value() string +} + +type Assignment interface { + ASTStatement + Key() string + Value() string +} + +func NewAST() ModifiableAST { + return &ast{} +} + +type section struct { + sectionToken *Token + sectionStart *Token + sectionEnd *Token + statements []ASTStatement +} + +func (s *section) Tokens() Tokens { + tokens := make(Tokens, 0) + tokens = append(tokens, s.sectionToken) + tokens = append(tokens, s.sectionStart) + tokens = append(tokens, NewNewline().Tokens()...) + for _, statement := range s.statements { + tokens = append(tokens, statement.Tokens()...) + } + tokens = append(tokens, s.sectionEnd) + tokens = append(tokens, NewNewline().Tokens()...) + return tokens +} + +func (s *section) Append(statement ASTStatement) { + s.statements = append(s.statements, statement) +} + +func (s *section) Children() []ASTStatement { + return s.statements +} + +var _ Section = §ion{} +var _ ASTStatement = §ion{} + +func (s *section) Name() string { + return string(s.sectionToken.Value) +} + +var _ Section = §ion{} + +type comment struct { + indicator *Token + value *Token +} + +func (c *comment) Tokens() Tokens { + return append(Tokens{c.indicator, c.value}, NewNewline().Tokens()...) 
+} + +func (c *comment) Value() string { + return string(c.value.Value) +} + +var _ Comment = &comment{} + +type assignment struct { + key *Token + assignment *Token + value *Token +} + +func (a *assignment) Tokens() Tokens { + return append(Tokens{a.key, a.assignment, a.value}, NewNewline().Tokens()...) +} + +var _ Assignment = &assignment{} + +func (a *assignment) Key() string { + return string(a.key.Value) +} + +func (a *assignment) Value() string { + return string(a.value.Value) +} + +func (a *ast) Parse(t Tokens) { + a.statements = nil + var currentSection *section + add := func(statement ASTStatement) { + if currentSection != nil { + currentSection.statements = append(currentSection.statements, statement) + } else { + a.statements = append(a.statements, statement) + } + } + for i, token := range t { + if token.Type == TokenTypeSection { + section := §ion{sectionToken: token} + a.statements = append(a.statements, section) + currentSection = section + continue + } + if token.Type == TokenTypeStartOfSection { + currentSection.sectionStart = token + continue + } + if token.Type == TokenTypeEndOfSection { + currentSection.sectionEnd = token + currentSection = nil + continue + } + if token.Type == TokenTypeComment { + comment := &comment{indicator: token} + comment.value = t[i+1] + add(comment) + i++ + continue + } + if token.Type == TokenTypeAssignment { + assignment := &assignment{assignment: token} + assignment.key = t[i-1] + assignment.value = t[i+1] + add(assignment) + i++ + continue + } + } +} + +func (a *ast) Tokens() Tokens { + tokens := make(Tokens, 0) + for _, statement := range a.statements { + tokens = append(tokens, statement.Tokens()...) + } + + line := 1 + for _, token := range tokens { + token.Line = line + if token.Type == TokenTypeEndOfStatement { + line++ + } + } + + if len(tokens) > 0 { + tokens = append(tokens, TokenEOF) + } + + return tokens +} + +func NewSection(name string) Section { + return §ion{ + sectionToken: &Token{Type: TokenTypeSection, Value: []byte(name)}, + sectionStart: &Token{Type: TokenTypeStartOfSection, Value: []byte{'{'}}, + sectionEnd: &Token{Type: TokenTypeEndOfSection, Value: []byte{'}'}}, + } +} + +type MultiLineComment interface { + ASTStatement +} + +type multiLineComment []Comment + +func (m multiLineComment) Tokens() Tokens { + tokens := make(Tokens, 0, len(m)) + for _, comment := range m { + tokens = append(tokens, comment.Tokens()...) 
+ } + return tokens +} + +func NewMultiLineComment(value string) MultiLineComment { + scanner := bufio.NewScanner(strings.NewReader(value)) + comments := make([]Comment, 0) + for scanner.Scan() { + comments = append(comments, NewComment(scanner.Text())) + } + return multiLineComment(comments) +} + +func NewComment(value string) Comment { + return &comment{ + indicator: &Token{Type: TokenTypeComment, Value: []byte{'#'}}, + value: &Token{Type: TokenTypeCommentValue, Value: []byte(value)}, + } +} + +func NewAssignmentFromString(key, value string) Assignment { + return &assignment{ + key: &Token{Type: TokenTypeIdentifier, Value: []byte(key)}, + assignment: &Token{Type: TokenTypeAssignment, Value: []byte{'='}}, + value: &Token{Type: TokenTypeString, Value: []byte(value)}, + } +} + +func NewAssignmentFromSpec(spec *LVMStructTagFieldMapping) Assignment { + switch spec.Kind() { + case reflect.Int64: + return NewAssignmentInt64(spec.Name, spec.Value.Int()) + default: + return NewAssignmentFromString(spec.Name, spec.Value.String()) + } +} + +func NewAssignment(key string, v any) Assignment { + switch v := v.(type) { + case *LVMStructTagFieldMapping: + return NewAssignmentFromSpec(v) + case int64: + return NewAssignmentInt64(key, v) + case string: + return NewAssignmentFromString(key, v) + default: + panic(fmt.Sprintf("unexpected type %T", v)) + } +} + +func NewAssignmentInt64(key string, value int64) Assignment { + return &assignment{ + key: &Token{Type: TokenTypeIdentifier, Value: []byte(key)}, + assignment: &Token{Type: TokenTypeAssignment, Value: []byte{'='}}, + value: &Token{Type: TokenTypeInt64, Value: []byte(fmt.Sprintf("%d", value))}, + } +} + +func NewNewline() Newline { + return &newline{&Token{Type: TokenTypeEndOfStatement, Value: []byte{'\n'}}} +} diff --git a/config_lexer.go b/config/lexer.go similarity index 56% rename from config_lexer.go rename to config/lexer.go index c7e1165..8d9da2f 100644 --- a/config_lexer.go +++ b/config/lexer.go @@ -1,4 +1,4 @@ -package lvm2go +package config import ( "bufio" @@ -6,19 +6,22 @@ import ( "fmt" "io" "strings" + "time" "unicode/utf8" + + "github.com/jakobmoellerdev/lvm2go/util" ) -// ConfigLexerReader is an interface for reading tokens from a configuration file +// LexerReader is an interface for reading tokens from a configuration file // The lexer reads the configuration file and returns ConfigTokens that can be used to // decode the configuration file into a struct or do other operations. -// Any returned Token is one of ConfigTokenType, for more details see the ConfigTokenType documentation. -type ConfigLexerReader interface { +// Any returned Token is one of TokenType, for more details see the TokenType documentation. +type LexerReader interface { // Lex reads the configuration file and returns all tokens in the file or an error if one occurs // The lexer will return an EOF token when the end of the file is reached. // The lexer will return an Error token when an error occurs. // Lex can be used to read the entire configuration file in one operation and to decouple reading from parsing. - Lex() (ConfigTokens, error) + Lex() (Tokens, error) // Next returns the next set of tokens in the configuration file that can be read in a single operation // Note that using Next() will not fail out if an error occurs, it will return the ConfigTokenError in the tokens @@ -26,128 +29,128 @@ type ConfigLexerReader interface { // The lexer will return an EOF token when the end of the file is reached. // The lexer will return an Error token when an error occurs. 
// Next can be used to implement efficient parsers that only read the next token when needed. - Next() ConfigTokens + Next() Tokens } -func NewBufferedConfigLexer(dataStream io.Reader) ConfigLexerReader { - return &ConfigLexer{ - current: ConfigTokenTypeSOF, +func NewBufferedLexer(dataStream io.Reader) LexerReader { + return &Lexer{ + current: TokenTypeSOF, dataStream: bufio.NewReaderSize(dataStream, 4096), lineBuffer: bytes.NewBuffer(make([]byte, 256)), currentLine: 1, } } -type ConfigTokenType rune +type TokenType rune const ( - // ConfigTokenTypeComment represents comments such as + // TokenTypeComment represents comments such as // # This is a comment - ConfigTokenTypeComment ConfigTokenType = iota + TokenTypeComment TokenType = iota - ConfigTokenTypeCommentValue ConfigTokenType = iota + TokenTypeCommentValue TokenType = iota - // ConfigTokenTypeEndOfStatement represents the end of a statement + // TokenTypeEndOfStatement represents the end of a statement // This can be a newline. - ConfigTokenTypeEndOfStatement ConfigTokenType = iota + TokenTypeEndOfStatement TokenType = iota - // ConfigTokenTypeSection represents a section name + // TokenTypeSection represents a section name // Example: // config { ← This is a section named "config" // key = value // } - ConfigTokenTypeSection ConfigTokenType = iota + TokenTypeSection TokenType = iota - // ConfigTokenTypeStartOfSection represents the start of a section + // TokenTypeStartOfSection represents the start of a section // Example: // config { ← This is a section start token "{" - ConfigTokenTypeStartOfSection ConfigTokenType = iota + TokenTypeStartOfSection TokenType = iota - // ConfigTokenTypeEndOfSection represents the end of a section + // TokenTypeEndOfSection represents the end of a section // Example: // config { ← This is a section end token "}" - ConfigTokenTypeEndOfSection ConfigTokenType = iota + TokenTypeEndOfSection TokenType = iota - // ConfigTokenTypeString represents a string + // TokenTypeString represents a string // Example: // key = "value" ← This is a string token "value" - ConfigTokenTypeString ConfigTokenType = iota + TokenTypeString TokenType = iota - // ConfigTokenTypeIdentifier represents an identifier + // TokenTypeIdentifier represents an identifier // Example: // key = value ← This is an identifier token "key" - ConfigTokenTypeIdentifier ConfigTokenType = iota + TokenTypeIdentifier TokenType = iota - // ConfigTokenTypeAssignment represents an assignment + // TokenTypeAssignment represents an assignment // Example: // key = value ← This is an assignment token "=" - ConfigTokenTypeAssignment ConfigTokenType = iota + TokenTypeAssignment TokenType = iota - // ConfigTokenTypeInt64 represents an int64 + // TokenTypeInt64 represents an int64 // Example: // key = 1234 ← This is an int64 token "1234" - ConfigTokenTypeInt64 ConfigTokenType = iota + TokenTypeInt64 TokenType = iota - // ConfigTokenTypeSOF represents the start of the file - ConfigTokenTypeSOF ConfigTokenType = iota + // TokenTypeSOF represents the start of the file + TokenTypeSOF TokenType = iota - // ConfigTokenTypeEOF represents the end of the file - ConfigTokenTypeEOF ConfigTokenType = iota + // TokenTypeEOF represents the end of the file + TokenTypeEOF TokenType = iota - ConfigTokenTypeError ConfigTokenType = iota + TokenTypeError TokenType = iota - // configTokenTypeNotYetKnown represents a token that has not yet been lexed - configTokenTypeNotYetKnown ConfigTokenType = iota + // TokenTypeNotYetKnown represents a token that has not yet been lexed + 
TokenTypeNotYetKnown TokenType = iota ) -func (t ConfigTokenType) String() string { +func (t TokenType) String() string { switch t { - case ConfigTokenTypeComment: + case TokenTypeComment: return "Comment" - case ConfigTokenTypeCommentValue: + case TokenTypeCommentValue: return "CommentValue" - case ConfigTokenTypeEndOfStatement: + case TokenTypeEndOfStatement: return "EndOfStatement" - case ConfigTokenTypeSection: + case TokenTypeSection: return "Section" - case ConfigTokenTypeStartOfSection: + case TokenTypeStartOfSection: return "SectionStart" - case ConfigTokenTypeEndOfSection: + case TokenTypeEndOfSection: return "SectionEnd" - case ConfigTokenTypeString: + case TokenTypeString: return "String" - case ConfigTokenTypeIdentifier: + case TokenTypeIdentifier: return "Identifier" - case ConfigTokenTypeAssignment: + case TokenTypeAssignment: return "Assignment" - case ConfigTokenTypeInt64: + case TokenTypeInt64: return "Int64" - case ConfigTokenTypeSOF: + case TokenTypeSOF: return "SOF" - case ConfigTokenTypeEOF: + case TokenTypeEOF: return "EOF" - case ConfigTokenTypeError: + case TokenTypeError: return "Error" default: return "Unknown" } } -type ConfigLexer struct { +type Lexer struct { // dataStream is the stream of data to be lexed dataStream *bufio.Reader // lineBuffer is a buffer to store the current line being lexed in case of lookbehind lineBuffer *bytes.Buffer - current ConfigTokenType + current TokenType readCount int currentLine int } -type ConfigTokens []*ConfigToken +type Tokens []*Token -func (t ConfigTokens) String() string { +func (t Tokens) String() string { builder := strings.Builder{} for _, token := range t { builder.WriteString(token.String()) @@ -156,7 +159,7 @@ func (t ConfigTokens) String() string { return builder.String() } -func (t ConfigTokens) minimumSize() int { +func (t Tokens) minimumSize() int { size := 0 for _, token := range t { size += len(token.Value) @@ -164,22 +167,22 @@ func (t ConfigTokens) minimumSize() int { return size } -type configTokensByIdentifier map[string]ConfigTokens +type configTokensByIdentifier map[string]Tokens -func assignmentsWithSections(t ConfigTokens) configTokensByIdentifier { +func AssignmentsWithSections(t Tokens) configTokensByIdentifier { sectionIndex := -1 - assignments := make(map[string]ConfigTokens) + assignments := make(map[string]Tokens) for i, token := range t { - if token.Type == ConfigTokenTypeSection { + if token.Type == TokenTypeSection { sectionIndex = i continue } - if token.Type != ConfigTokenTypeAssignment { + if token.Type != TokenTypeAssignment { continue } - assignments[string(t[i-1].Value)] = ConfigTokens{ + assignments[string(t[i-1].Value)] = Tokens{ t[sectionIndex], t[i-1], token, @@ -189,7 +192,7 @@ func assignmentsWithSections(t ConfigTokens) configTokensByIdentifier { return assignments } -func (a configTokensByIdentifier) overrideWith(other configTokensByIdentifier) (notFound ConfigTokens) { +func (a configTokensByIdentifier) OverrideWith(other configTokensByIdentifier) (notFound Tokens) { for key, value := range other { v, ok := a[key] if !ok { @@ -201,41 +204,41 @@ func (a configTokensByIdentifier) overrideWith(other configTokensByIdentifier) ( return } -func appendAssignmentsAtEndOfSections(into ConfigTokens, toAdd ConfigTokens) ConfigTokens { +func AppendAssignmentsAtEndOfSections(into Tokens, toAdd Tokens) Tokens { section := "" - tokens := ConfigTokens{} + tokens := Tokens{} for i, token := range into { tokens = append(tokens, token) - if token.Type == ConfigTokenTypeSection { + if token.Type == 
TokenTypeSection { section = string(token.Value) continue } - if token.Type == ConfigTokenTypeEndOfSection { - candidates := ConfigTokens{} + if token.Type == TokenTypeEndOfSection { + candidates := Tokens{} for j, token := range toAdd { - if token.Type != ConfigTokenTypeAssignment { + if token.Type != TokenTypeAssignment { continue } inSection := section != "" - isID := toAdd[j-1].Type == ConfigTokenTypeIdentifier - isSection := toAdd[j-2].Type == ConfigTokenTypeSection + isID := toAdd[j-1].Type == TokenTypeIdentifier + isSection := toAdd[j-2].Type == TokenTypeSection if inSection && isID && isSection && section == string(toAdd[j-2].Value) { candidates = append(candidates, - &ConfigToken{ - Type: ConfigTokenTypeComment, + &Token{ + Type: TokenTypeComment, Value: runeToUTF8('#'), }, - &ConfigToken{ - Type: ConfigTokenTypeCommentValue, + &Token{ + Type: TokenTypeCommentValue, Value: []byte(generateLVMConfigEditComment()), }, - &ConfigToken{ - Type: ConfigTokenTypeEndOfStatement, + &Token{ + Type: TokenTypeEndOfStatement, Value: runeToUTF8('\n'), }, toAdd[j-1], token, toAdd[j+1], - &ConfigToken{ - Type: ConfigTokenTypeEndOfStatement, + &Token{ + Type: TokenTypeEndOfStatement, Value: runeToUTF8('\n'), }) } @@ -247,18 +250,18 @@ func appendAssignmentsAtEndOfSections(into ConfigTokens, toAdd ConfigTokens) Con return tokens } -func (t ConfigTokens) InSection(section string) ConfigTokens { - tokensInSection := ConfigTokens{} +func (t Tokens) InSection(section string) Tokens { + tokensInSection := Tokens{} for _, token := range t { - if token.Type == ConfigTokenTypeSection { + if token.Type == TokenTypeSection { if inSection := string(token.Value) == section; inSection { continue } } - if token.Type == ConfigTokenTypeStartOfSection { + if token.Type == TokenTypeStartOfSection { continue } - if token.Type == ConfigTokenTypeEndOfSection { + if token.Type == TokenTypeEndOfSection { break } tokensInSection = append(tokensInSection, token) @@ -266,14 +269,14 @@ func (t ConfigTokens) InSection(section string) ConfigTokens { return tokensInSection } -type ConfigToken struct { - Type ConfigTokenType +type Token struct { + Type TokenType Value []byte Err error Line, Start int } -func (t ConfigToken) String() string { +func (t Token) String() string { builder := strings.Builder{} builder.WriteString(fmt.Sprintf("%d:%d\t", t.Line, t.Start)) builder.WriteString(t.Type.String()) @@ -286,24 +289,24 @@ func (t ConfigToken) String() string { return builder.String() } -var ConfigTokenEOF = &ConfigToken{Type: ConfigTokenTypeEOF, Start: -1, Line: -1} +var TokenEOF = &Token{Type: TokenTypeEOF, Start: -1, Line: -1} -func ConfigTokenError(err error) *ConfigToken { - return &ConfigToken{Type: ConfigTokenTypeError, Err: err, Start: -1, Line: -1} +func TokenError(err error) *Token { + return &Token{Type: TokenTypeError, Err: err, Start: -1, Line: -1} } -func (l *ConfigLexer) Lex() (ConfigTokens, error) { - tokens := make(ConfigTokens, 0, 4) +func (l *Lexer) Lex() (Tokens, error) { + tokens := make(Tokens, 0, 4) for { tokensFromNext := l.Next() tokens = append(tokens, tokensFromNext...) 
// If the next token is an EOF or an error, return the tokens for _, next := range tokensFromNext { - if next.Type == ConfigTokenTypeEOF { + if next.Type == TokenTypeEOF { return tokens, nil } - if next.Type == ConfigTokenTypeError { + if next.Type == TokenTypeError { return tokens, next.Err } } @@ -311,30 +314,30 @@ func (l *ConfigLexer) Lex() (ConfigTokens, error) { } // Next returns the next token in the stream -func (l *ConfigLexer) Next() ConfigTokens { +func (l *Lexer) Next() Tokens { l.lineBuffer.Reset() for { candidate, size, err := l.dataStream.ReadRune() if err == io.EOF { - return ConfigTokens{ConfigTokenEOF} + return Tokens{TokenEOF} } if err != nil { - return ConfigTokens{ConfigTokenError(err)} + return Tokens{TokenError(err)} } l.readCount += size tokenType := l.RuneToTokenType(candidate) - if tokenType == configTokenTypeNotYetKnown { + if tokenType == TokenTypeNotYetKnown { l.lineBuffer.WriteRune(candidate) continue } - if tokenType == ConfigTokenTypeEndOfStatement { + if tokenType == TokenTypeEndOfStatement { l.lineBuffer.Reset() l.currentLine++ - return ConfigTokens{{ - Type: ConfigTokenTypeEndOfStatement, + return Tokens{{ + Type: TokenTypeEndOfStatement, Value: runeToUTF8(candidate), Start: l.readCount, Line: l.currentLine, @@ -342,50 +345,50 @@ func (l *ConfigLexer) Next() ConfigTokens { } loc := l.readCount - tokens := ConfigTokens{} + tokens := Tokens{} switch tokenType { - case ConfigTokenTypeComment: + case TokenTypeComment: tokens = l.newComment(candidate, loc) - case ConfigTokenTypeEndOfSection: - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeEndOfSection, + case TokenTypeEndOfSection: + tokens = append(tokens, &Token{ + Type: TokenTypeEndOfSection, Value: runeToUTF8(candidate), Start: l.readCount, Line: l.currentLine, }) - case ConfigTokenTypeStartOfSection: + case TokenTypeStartOfSection: tokens = l.newSectionStart(candidate, loc) - case ConfigTokenTypeAssignment: + case TokenTypeAssignment: tokens = l.newAssignment(candidate, loc) default: - return ConfigTokens{ConfigTokenError(fmt.Errorf("unexpected token type %v", tokenType))} + return Tokens{TokenError(fmt.Errorf("unexpected token type %v", tokenType))} } return tokens } } -func (l *ConfigLexer) newComment(candidate rune, loc int) ConfigTokens { +func (l *Lexer) newComment(candidate rune, loc int) Tokens { comment, err := l.dataStream.ReadBytes('\n') l.readCount += len(comment) trimmedComment := bytes.TrimSpace(comment) - tokens := ConfigTokens{ + tokens := Tokens{ { - Type: ConfigTokenTypeComment, + Type: TokenTypeComment, Value: runeToUTF8(candidate), Start: loc, Line: l.currentLine, }, { - Type: ConfigTokenTypeCommentValue, + Type: TokenTypeCommentValue, Value: trimmedComment, Start: loc + len(comment) - len(trimmedComment), Line: l.currentLine, }, { - Type: ConfigTokenTypeEndOfStatement, + Type: TokenTypeEndOfStatement, Value: runeToUTF8('\n'), Start: loc + len(comment), Line: l.currentLine, @@ -393,9 +396,9 @@ func (l *ConfigLexer) newComment(candidate rune, loc int) ConfigTokens { } if err == io.EOF { - tokens = append(tokens, ConfigTokenEOF) + tokens = append(tokens, TokenEOF) } else if err != nil { - tokens = append(tokens, ConfigTokenError(err)) + tokens = append(tokens, TokenError(err)) } l.lineBuffer.Reset() l.currentLine++ @@ -403,19 +406,19 @@ func (l *ConfigLexer) newComment(candidate rune, loc int) ConfigTokens { return tokens } -func (l *ConfigLexer) newSectionStart(candidate rune, loc int) ConfigTokens { +func (l *Lexer) newSectionStart(candidate rune, loc int) Tokens { 
section := l.lineBuffer.Bytes() sectionTrimmed := bytes.TrimSpace(section) - tokens := ConfigTokens{ + tokens := Tokens{ { - Type: ConfigTokenTypeSection, + Type: TokenTypeSection, Value: bytes.Clone(sectionTrimmed), Start: loc - len(section), Line: l.currentLine, }, { - Type: ConfigTokenTypeStartOfSection, + Type: TokenTypeStartOfSection, Value: runeToUTF8(candidate), Start: loc, Line: l.currentLine, @@ -425,17 +428,17 @@ func (l *ConfigLexer) newSectionStart(candidate rune, loc int) ConfigTokens { return tokens } -func (l *ConfigLexer) newAssignment(candidate rune, loc int) ConfigTokens { +func (l *Lexer) newAssignment(candidate rune, loc int) Tokens { identifier := bytes.TrimSpace(l.lineBuffer.Bytes()) - tokens := ConfigTokens{ + tokens := Tokens{ { - Type: ConfigTokenTypeIdentifier, + Type: TokenTypeIdentifier, Value: bytes.Clone(identifier), Start: loc - len(identifier) - 1, Line: l.currentLine, }, { - Type: ConfigTokenTypeAssignment, + Type: TokenTypeAssignment, Value: runeToUTF8(candidate), Start: loc, Line: l.currentLine, @@ -455,14 +458,14 @@ func (l *ConfigLexer) newAssignment(candidate rune, loc int) ConfigTokens { tokens = append(tokens, valueToken, - &ConfigToken{ - Type: ConfigTokenTypeComment, + &Token{ + Type: TokenTypeComment, Value: runeToUTF8('#'), Start: commentStart, Line: l.currentLine, }, - &ConfigToken{ - Type: ConfigTokenTypeCommentValue, + &Token{ + Type: TokenTypeCommentValue, Value: bytes.TrimSpace(commentTrimmed), Start: commentStart + len(comment) - len(commentTrimmed) - 1, Line: l.currentLine, @@ -474,8 +477,8 @@ func (l *ConfigLexer) newAssignment(candidate rune, loc int) ConfigTokens { } tokens = append(tokens, - &ConfigToken{ - Type: ConfigTokenTypeEndOfStatement, + &Token{ + Type: TokenTypeEndOfStatement, Value: runeToUTF8('\n'), Line: l.currentLine, Start: l.readCount, @@ -486,30 +489,30 @@ func (l *ConfigLexer) newAssignment(candidate rune, loc int) ConfigTokens { l.currentLine++ if err == io.EOF { - tokens = append(tokens, ConfigTokenEOF) + tokens = append(tokens, TokenEOF) } else if err != nil { - tokens = append(tokens, ConfigTokenError(err)) + tokens = append(tokens, TokenError(err)) } return tokens } -func (l *ConfigLexer) createValueToken(line []byte, loc int) *ConfigToken { +func (l *Lexer) createValueToken(line []byte, loc int) *Token { sQidx := bytes.IndexByte(line, '"') lQidx := bytes.LastIndexByte(line, '"') - var valueToken ConfigToken + var valueToken Token if sQidx == -1 && lQidx == -1 { trimmedLine := bytes.TrimSpace(line) - valueToken = ConfigToken{ - Type: ConfigTokenTypeInt64, + valueToken = Token{ + Type: TokenTypeInt64, Value: trimmedLine, Start: loc + len(line) - len(trimmedLine), Line: l.currentLine, } } else { trimmedLine := bytes.TrimSpace(line[sQidx+1 : lQidx]) - valueToken = ConfigToken{ - Type: ConfigTokenTypeString, + valueToken = Token{ + Type: TokenTypeString, Value: trimmedLine, Start: loc + len(line) - len(trimmedLine) - 2, Line: l.currentLine, @@ -518,20 +521,20 @@ func (l *ConfigLexer) createValueToken(line []byte, loc int) *ConfigToken { return &valueToken } -func (l *ConfigLexer) RuneToTokenType(r rune) ConfigTokenType { +func (l *Lexer) RuneToTokenType(r rune) TokenType { switch r { case '{': - return ConfigTokenTypeStartOfSection + return TokenTypeStartOfSection case '}': - return ConfigTokenTypeEndOfSection + return TokenTypeEndOfSection case '=': - return ConfigTokenTypeAssignment + return TokenTypeAssignment case '\n': - return ConfigTokenTypeEndOfStatement + return TokenTypeEndOfStatement case '#': - return 
ConfigTokenTypeComment + return TokenTypeComment default: - return configTokenTypeNotYetKnown + return TokenTypeNotYetKnown } } @@ -554,3 +557,9 @@ func runesToUTF8(rs []rune) []byte { return bs } + +// generateLVMConfigEditComment generates a comment to be added to the configuration file +// This comment is used to indicate that the field was edited by the client. +func generateLVMConfigEditComment() string { + return fmt.Sprintf(`This field was edited by %s at %s`, util.ModuleID(), time.Now().Format(time.RFC3339)) +} diff --git a/config_lexer_decoder.go b/config/lexer_decoder.go similarity index 50% rename from config_lexer_decoder.go rename to config/lexer_decoder.go index 64f6968..f9b0bc7 100644 --- a/config_lexer_decoder.go +++ b/config/lexer_decoder.go @@ -1,4 +1,4 @@ -package lvm2go +package config import ( "fmt" @@ -6,51 +6,38 @@ import ( "strconv" ) -type LexingConfigDecoder interface { - ConfigLexerReader +type LexingDecoder interface { Decode(v any) error } -type LexingConfigEncoder interface { - Encode(v any) error -} - -type StructuredLexingConfigDecoder interface { +type StructuredLexingDecoder interface { DecodeStructured(v any) error } -type UnstructuredLexingConfigDecoder interface { +type UnstructuredLexingDecoder interface { DecodeUnstructured(v any) error } -func NewLexingConfigDecoder(reader io.Reader) LexingConfigDecoder { - return &configLexDecoder{ - ConfigLexerReader: NewBufferedConfigLexer(reader), +func NewLexingConfigDecoder(reader io.Reader) LexingDecoder { + return &lexDecoder{ + LexerReader: NewBufferedLexer(reader), } } -type configLexDecoder struct { - ConfigLexerReader +type lexDecoder struct { + LexerReader } -var _ LexingConfigDecoder = &configLexDecoder{} +var _ LexingDecoder = &lexDecoder{} -func (d *configLexDecoder) Decode(v any) error { +func (d *lexDecoder) Decode(v any) error { if isUnstructuredMap(v) { return d.DecodeUnstructured(v) } return d.DecodeStructured(v) } -func isUnstructuredMap(v any) bool { - switch v.(type) { - case map[string]interface{}, *map[string]interface{}: - return true - } - return false -} - -func (d *configLexDecoder) DecodeUnstructured(v any) error { +func (d *lexDecoder) DecodeUnstructured(v any) error { lexTree, err := d.Lex() if err != nil { return err @@ -67,21 +54,21 @@ func (d *configLexDecoder) DecodeUnstructured(v any) error { var section string for i, node := range lexTree { - if node.Type == ConfigTokenTypeSection { + if node.Type == TokenTypeSection { section = string(node.Value) continue } - if node.Type == ConfigTokenTypeEndOfSection { + if node.Type == TokenTypeEndOfSection { section = "" continue } - if node.Type == ConfigTokenTypeAssignment { + if node.Type == TokenTypeAssignment { kidx := i - 1 if kidx < 0 { return fmt.Errorf("expected identifier before assignment") } keyInTree := lexTree[i-1] - if keyInTree.Type != ConfigTokenTypeIdentifier { + if keyInTree.Type != TokenTypeIdentifier { return fmt.Errorf("expected identifier before assignment, got %s", keyInTree.Type) } key := string(keyInTree.Value) @@ -97,11 +84,11 @@ func (d *configLexDecoder) DecodeUnstructured(v any) error { } switch valueInTree.Type { - case ConfigTokenTypeString: + case TokenTypeString: m[key] = string(valueInTree.Value) - case ConfigTokenTypeInt64: + case TokenTypeInt64: if val, err := strconv.ParseInt(string(valueInTree.Value), 10, 64); err != nil { - return fmt.Errorf("could not parse int64: %w", err) + return fmt.Errorf("could not Parse int64: %w", err) } else { m[key] = val } @@ -113,58 +100,58 @@ func (d *configLexDecoder) 
DecodeUnstructured(v any) error { return nil } -func (d *configLexDecoder) DecodeStructured(v any) error { - fieldSpecs, err := DecodeLVMStructTagFieldMappings(v) +func (d *lexDecoder) DecodeStructured(v any) error { + fieldSpecs, err := DecodeFieldMappings(v) if err != nil { return err } - decoder := &structuredConfigLexDecoder{ - ConfigLexerReader: d.ConfigLexerReader, - StructuredFieldMapping: fieldSpecs, + decoder := &structuredLexingDecoder{ + LexerReader: d.LexerReader, + structuredFieldMapping: fieldSpecs, } return decoder.Decode() } -func newLexingConfigDecoderWithFieldMapping( +func NewLexingConfigDecoderWithFieldMapping( reader io.Reader, - fieldSpecs map[string]LVMStructTagFieldMapping, -) *structuredConfigLexDecoder { - return &structuredConfigLexDecoder{ - ConfigLexerReader: NewBufferedConfigLexer(reader), - StructuredFieldMapping: fieldSpecs, - MapHints: newHintsFromFieldSpecs(fieldSpecs), + fieldSpecs LVMStructTagFieldMappings, +) *structuredLexingDecoder { + return &structuredLexingDecoder{ + LexerReader: NewBufferedLexer(reader), + structuredFieldMapping: fieldSpecs, + mapHints: newHintsFromFieldSpecs(fieldSpecs), } } -func newHintsFromFieldSpecs(keys map[string]LVMStructTagFieldMapping) map[string]structuredDecodeHint { +func newHintsFromFieldSpecs(mappings LVMStructTagFieldMappings) map[string]structuredDecodeHint { hints := make(map[string]structuredDecodeHint) - for _, key := range keys { - hints[key.name] = structuredDecodeHint{ - section: key.prefix, + for _, key := range mappings { + hints[key.Name] = structuredDecodeHint{ + section: key.Prefix, } } return hints } -type structuredConfigLexDecoder struct { - ConfigLexerReader - StructuredFieldMapping map[string]LVMStructTagFieldMapping - MapHints map[string]structuredDecodeHint +type structuredLexingDecoder struct { + LexerReader + structuredFieldMapping LVMStructTagFieldMappings + mapHints map[string]structuredDecodeHint } type structuredDecodeHint struct { section string } -func (d *structuredConfigLexDecoder) Decode() error { - fieldSpecsKeyed := make(map[string]map[string]LVMStructTagFieldMapping) - for _, fieldSpec := range d.StructuredFieldMapping { - keyed, ok := fieldSpecsKeyed[fieldSpec.prefix] +func (d *structuredLexingDecoder) Decode() error { + fieldSpecsKeyed := make(map[string]LVMStructTagFieldMappings) + for _, fieldSpec := range d.structuredFieldMapping { + keyed, ok := fieldSpecsKeyed[fieldSpec.Prefix] if !ok { - fieldSpecsKeyed[fieldSpec.prefix] = make(map[string]LVMStructTagFieldMapping) - keyed = fieldSpecsKeyed[fieldSpec.prefix] + fieldSpecsKeyed[fieldSpec.Prefix] = make(LVMStructTagFieldMappings) + keyed = fieldSpecsKeyed[fieldSpec.Prefix] } - keyed[fieldSpec.name] = fieldSpec + keyed[fieldSpec.Name] = fieldSpec } lexTree, err := d.Lex() @@ -174,21 +161,21 @@ func (d *structuredConfigLexDecoder) Decode() error { var section string for i, node := range lexTree { - if node.Type == ConfigTokenTypeSection { + if node.Type == TokenTypeSection { section = string(node.Value) continue } - if node.Type == ConfigTokenTypeEndOfSection { + if node.Type == TokenTypeEndOfSection { section = "" continue } - if node.Type == ConfigTokenTypeAssignment { + if node.Type == TokenTypeAssignment { kidx := i - 1 if kidx < 0 { return fmt.Errorf("expected identifier before assignment") } keyInTree := lexTree[i-1] - if keyInTree.Type != ConfigTokenTypeIdentifier { + if keyInTree.Type != TokenTypeIdentifier { return fmt.Errorf("expected identifier before assignment, got %s", keyInTree.Type) } key := string(keyInTree.Value) 
@@ -199,8 +186,8 @@ func (d *structuredConfigLexDecoder) Decode() error { } valueInTree := lexTree[i+1] - if d.MapHints != nil { - if hint, ok := d.MapHints[key]; ok { + if d.mapHints != nil { + if hint, ok := d.mapHints[key]; ok { if hint.section != "" { section = hint.section } @@ -217,11 +204,11 @@ func (d *structuredConfigLexDecoder) Decode() error { } switch valueInTree.Type { - case ConfigTokenTypeString: + case TokenTypeString: field.SetString(string(valueInTree.Value)) - case ConfigTokenTypeInt64: + case TokenTypeInt64: if val, err := strconv.ParseInt(string(valueInTree.Value), 10, 64); err != nil { - return fmt.Errorf("could not parse int64: %w", err) + return fmt.Errorf("could not Parse int64: %w", err) } else { field.SetInt(val) } diff --git a/config/lexer_encoder.go b/config/lexer_encoder.go new file mode 100644 index 0000000..afea104 --- /dev/null +++ b/config/lexer_encoder.go @@ -0,0 +1,188 @@ +package config + +import ( + "bytes" + "fmt" + "io" + "slices" + "strings" +) + +type LexingEncoder interface { + Encode(v any) error +} + +type StructuredLexingEncoder interface { + EncodeStructured(v any) error +} + +type UnstructuredLexingEncoder interface { + EncodeUnstructured(v any) error +} + +func NewLexingEncoder(writer io.Writer) LexingEncoder { + return &lexEncoder{ + Writer: writer, + } +} + +type lexEncoder struct { + Writer io.Writer +} + +func (c *lexEncoder) Encode(v any) error { + switch v := v.(type) { + case Tokens: + return c.writeTokens(v) + } + if isUnstructuredMap(v) { + return c.EncodeUnstructured(v) + } + return c.EncodeStructured(v) +} + +func (c *lexEncoder) EncodeStructured(v any) error { + fieldSpecs, err := DecodeFieldMappings(v) + if err != nil { + return err + } + tokens := FieldMappingsToTokens(fieldSpecs) + return c.writeTokens(tokens) +} + +func (c *lexEncoder) EncodeUnstructured(v any) error { + m, ok := v.(map[string]interface{}) + if !ok { + mptr, ok := v.(*map[string]interface{}) + if !ok { + return fmt.Errorf("expected map[string]interface{} or *map[string]interface{}, got %T", v) + } + m = *mptr + } + + sectionKeys := make([]string, 0, len(m)) + fieldKeys := make([]string, 0, len(m)) + mbySection := make(map[string]map[string]interface{}) + for k, v := range m { + splitKey := strings.Split(k, "/") + section, key := splitKey[0], splitKey[1] + + if section == "" { + return fmt.Errorf("expected section prefix in key %q", k) + } + if key == "" { + return fmt.Errorf("expected key suffix in key %q", k) + } + + sectionMap, ok := mbySection[section] + if !ok { + mbySection[section] = make(map[string]interface{}) + sectionMap = mbySection[section] + sectionKeys = append(sectionKeys, section) + } + + sectionMap[key] = v + fieldKeys = append(fieldKeys, key) + } + + // Sort the keys to ensure a deterministic output + slices.SortStableFunc(sectionKeys, func(i, j string) int { + return strings.Compare(i, j) + }) + slices.SortStableFunc(fieldKeys, func(i, j string) int { + return strings.Compare(i, j) + }) + + ast := NewAST() + for _, section := range sectionKeys { + astSection := NewSection(section) + ast.Append(astSection) + for _, key := range fieldKeys { + value := mbySection[section][key] + astSection.Append(NewAssignment(key, value)) + } + } + + return c.writeTokens(ast.Tokens()) +} + +func (c *lexEncoder) writeTokens(tokens Tokens) error { + data, err := TokensToBytes(tokens) + if err != nil { + return fmt.Errorf("failed to write tokens into byte representation: %w", err) + } + + if _, err := io.Copy(c.Writer, bytes.NewReader(data)); err != nil { + 
return fmt.Errorf("failed to write tokens: %w", err) + } + return nil +} + +var _ LexingEncoder = &lexEncoder{} + +func TokensToBytes(tokens Tokens) ([]byte, error) { + // We can estimate a good buffer size by requesting 15% more than the minimum size for all values + // This way we are accommodating for the fact that we might need to add spaces or tabs for indentation. + expectedSize := int(float32(tokens.minimumSize()) * 1.15) + buf := bytes.NewBuffer(make([]byte, 0, expectedSize)) + + // writeTabOrSpaceIfInSection writes a tab if the line is in a section and has not been indented yet + // otherwise it writes a space + // we can use this for correct indentation of the configuration file + inSection := false + linesIndented := map[int]struct{}{} + writeTabOrSpaceIfInSection := func(line int) { + if inSection { + if _, ok := linesIndented[line]; !ok { + buf.WriteRune('\t') + linesIndented[line] = struct{}{} + } else { + buf.WriteRune(' ') + } + } + } + + for _, token := range tokens { + switch token.Type { + case TokenTypeComment: + writeTabOrSpaceIfInSection(token.Line) + buf.Write(token.Value) + buf.WriteRune(' ') // readability: Insert a space after the comment identifier + case TokenTypeCommentValue: + buf.Write(token.Value) + case TokenTypeEndOfStatement: + buf.Write(token.Value) + case TokenTypeSection: + buf.Write(token.Value) + buf.WriteRune(' ') // readability: Insert a space after the section identifier + case TokenTypeStartOfSection: + buf.Write(token.Value) + inSection = true + case TokenTypeEndOfSection: + buf.Write(token.Value) + inSection = false + case TokenTypeString: + buf.WriteRune('"') + buf.Write(token.Value) + buf.WriteRune('"') + case TokenTypeIdentifier: + writeTabOrSpaceIfInSection(token.Line) + buf.Write(token.Value) + buf.WriteRune(' ') // readability: Insert a space after the identifier + case TokenTypeAssignment: + buf.Write(token.Value) + buf.WriteRune(' ') // readability: Insert a space after the assignment + case TokenTypeInt64: + buf.Write(token.Value) + case TokenTypeSOF: + continue + case TokenTypeEOF: + break + case TokenTypeError: + return nil, token.Err + case TokenTypeNotYetKnown: + return nil, fmt.Errorf("unexpected token type %v", token.Type) + } + } + return buf.Bytes(), nil +} diff --git a/config_lexer_test.go b/config/lexer_test.go similarity index 62% rename from config_lexer_test.go rename to config/lexer_test.go index c4b8704..25a187b 100644 --- a/config_lexer_test.go +++ b/config/lexer_test.go @@ -1,11 +1,11 @@ -package lvm2go_test +package config_test import ( "bytes" _ "embed" "testing" - "github.com/jakobmoellerdev/lvm2go" + "github.com/jakobmoellerdev/lvm2go/config" ) //go:embed testdata/lextest.conf @@ -15,7 +15,7 @@ var lexerTest []byte var lexTestOutput string func TestConfigLexer(t *testing.T) { - lexer := lvm2go.NewBufferedConfigLexer(bytes.NewReader(lexerTest)) + lexer := config.NewBufferedLexer(bytes.NewReader(lexerTest)) tokens, err := lexer.Lex() if err != nil { @@ -25,7 +25,7 @@ func TestConfigLexer(t *testing.T) { t.Fatalf("unexpected output:\n%s", tokens.String()) } - data, err := lvm2go.ConfigTokensToBytes(tokens) + data, err := config.TokensToBytes(tokens) if err != nil { t.Fatalf("unexpected error: %v", err) } @@ -37,7 +37,7 @@ func TestConfigLexer(t *testing.T) { func TestNewLexingConfigDecoder(t *testing.T) { t.Run("structured", func(t *testing.T) { - decoder := lvm2go.NewLexingConfigDecoder(bytes.NewReader(lexerTest)) + decoder := config.NewLexingConfigDecoder(bytes.NewReader(lexerTest)) cfg := struct { Config struct { 
SomeField int64 `lvm:"some_field"` @@ -58,7 +58,7 @@ func TestNewLexingConfigDecoder(t *testing.T) { }) t.Run("unstructured", func(t *testing.T) { - decoder := lvm2go.NewLexingConfigDecoder(bytes.NewReader(lexerTest)) + decoder := config.NewLexingConfigDecoder(bytes.NewReader(lexerTest)) cfg := map[string]any{} if err := decoder.Decode(&cfg); err != nil { @@ -76,29 +76,31 @@ func TestNewLexingConfigDecoder(t *testing.T) { func TestNewLexingConfigEncoder(t *testing.T) { t.Run("structured", func(t *testing.T) { - cfg := struct { - Config struct { - SomeField int64 `lvm:"some_field"` - ProfileDir string `lvm:"profile_dir"` - } `lvm:"config"` - }{} + for i := 0; i < 20; i++ { + cfg := struct { + Config struct { + SomeField int64 `lvm:"some_field"` + ProfileDir string `lvm:"profile_dir"` + } `lvm:"config"` + }{} - cfg.Config.SomeField = 1 - cfg.Config.ProfileDir = "/my/custom/profile_dir" + cfg.Config.SomeField = 1 + cfg.Config.ProfileDir = "/my/custom/profile_dir" - testBuffer := &bytes.Buffer{} - encoder := lvm2go.NewLexingConfigEncoder(testBuffer) + testBuffer := &bytes.Buffer{} + encoder := config.NewLexingEncoder(testBuffer) - if err := encoder.Encode(&cfg); err != nil { - t.Fatalf("unexpected error: %v", err) - } + if err := encoder.Encode(&cfg); err != nil { + t.Fatalf("unexpected error: %v", err) + } - if testBuffer.String() != `config { - some_field = 1 + if testBuffer.String() != `config { profile_dir = "/my/custom/profile_dir" + some_field = 1 } ` { - t.Fatalf("unexpected output:\n%s", testBuffer.String()) + t.Fatalf("unexpected output:\n%s", testBuffer.String()) + } } }) @@ -108,20 +110,21 @@ func TestNewLexingConfigEncoder(t *testing.T) { cfg["config/some_field"] = int64(1) cfg["config/profile_dir"] = "/my/custom/profile_dir" - testBuffer := &bytes.Buffer{} - encoder := lvm2go.NewLexingConfigEncoder(testBuffer) + for i := 0; i < 20; i++ { + testBuffer := &bytes.Buffer{} + encoder := config.NewLexingEncoder(testBuffer) - if err := encoder.Encode(&cfg); err != nil { - t.Fatalf("unexpected error: %v", err) - } + if err := encoder.Encode(&cfg); err != nil { + t.Fatalf("unexpected error: %v", err) + } - if testBuffer.String() != `config { + if testBuffer.String() != `config { profile_dir = "/my/custom/profile_dir" some_field = 1 } ` { - // TODO Fix me sometimes failing - t.Fatalf("unexpected output:\n%s", testBuffer.String()) + t.Fatalf("unexpected output:\n%s", testBuffer.String()) + } } }) } diff --git a/config/struct_mapping.go b/config/struct_mapping.go new file mode 100644 index 0000000..1519eb7 --- /dev/null +++ b/config/struct_mapping.go @@ -0,0 +1,106 @@ +package config + +import ( + "fmt" + "reflect" + "slices" + "strings" + + lvmreflect "github.com/jakobmoellerdev/lvm2go/reflect" +) + +const LVMConfigStructTag = "lvm" +const LVMProfileExtension = ".profile" + +type LVMStructTagFieldMappings map[string]*LVMStructTagFieldMapping + +type LVMStructTagFieldMapping struct { + Prefix string + Name string + reflect.Value +} + +func (f LVMStructTagFieldMapping) String() string { + switch f.Kind() { + case reflect.Int64: + return fmt.Sprintf("%s = %d", f.Name, f.Int()) + default: + return fmt.Sprintf("%s = %q", f.Name, f.Value.String()) + } +} + +func DecodeFieldMappings(v any) (LVMStructTagFieldMappings, error) { + fields, typeAccessor, valueAccessor, err := lvmreflect.AccessStructOrPointerToStruct(v) + if err != nil { + return nil, err + } + + tagOrIgnore := func(tag reflect.StructTag) (string, bool) { + return tag.Get(LVMConfigStructTag), tag.Get(LVMConfigStructTag) == "-" + } + + 
fieldSpecs := make(LVMStructTagFieldMappings) + for i := range fields { + outerField := typeAccessor(i) + prefix, ignore := tagOrIgnore(outerField.Tag) + if ignore { + continue + } + fields, typeAccessor, valueAccessor, err := lvmreflect.AccessStructOrPointerToStruct(valueAccessor(i)) + if err != nil { + return nil, err + } + for j := range fields { + innerField := typeAccessor(j) + name, ignore := tagOrIgnore(innerField.Tag) + if ignore { + continue + } + fieldSpecs[name] = &LVMStructTagFieldMapping{ + prefix, + name, + valueAccessor(j), + } + } + } + return fieldSpecs, nil +} + +func FieldMappingsToTokens(mappings LVMStructTagFieldMappings) Tokens { + fieldSpecsKeyed := make(map[string]LVMStructTagFieldMappings) + + sectionKeys := make([]string, 0, len(fieldSpecsKeyed)) + fieldKeys := make([]string, 0, len(fieldSpecsKeyed)) + + for _, fieldSpec := range mappings { + keyed, ok := fieldSpecsKeyed[fieldSpec.Prefix] + if !ok { + fieldSpecsKeyed[fieldSpec.Prefix] = make(LVMStructTagFieldMappings) + keyed = fieldSpecsKeyed[fieldSpec.Prefix] + } + keyed[fieldSpec.Name] = fieldSpec + sectionKeys = append(sectionKeys, fieldSpec.Prefix) + fieldKeys = append(fieldKeys, fieldSpec.Name) + } + + // sections should appear only once + sectionKeys = slices.Compact(sectionKeys) + + // Sort the keys to ensure a deterministic output + slices.SortStableFunc(sectionKeys, func(i, j string) int { + return strings.Compare(i, j) + }) + slices.SortStableFunc(fieldKeys, func(i, j string) int { + return strings.Compare(i, j) + }) + + ast := NewAST() + for _, section := range sectionKeys { + astSection := NewSection(section) + ast.Append(astSection) + for _, field := range fieldKeys { + astSection.Append(NewAssignmentFromSpec(fieldSpecsKeyed[section][field])) + } + } + return ast.Tokens() +} diff --git a/config/testdata/lextest.conf b/config/testdata/lextest.conf new file mode 100644 index 0000000..2ab10bc --- /dev/null +++ b/config/testdata/lextest.conf @@ -0,0 +1,20 @@ +# Configuration section config. +# How LVM configuration settings are handled. +config { + # This configuration option has an automatic default value. + # checks = 1 + + # Configuration option config/abort_on_errors. + # Abort the LVM process if a configuration mismatch is found. + # This configuration option has an automatic default value. + # abort_on_errors = 0 + + some_field = 1 # This is a comment + + # Configuration option config/profile_dir. + # Directory where LVM looks for configuration profiles. + # This configuration option has an automatic default value. + # profile_dir = "/etc/lvm/profile" + + profile_dir = "/my/custom/profile_dir" +} diff --git a/config/testdata/lextest.output b/config/testdata/lextest.output new file mode 100644 index 0000000..59bde7a --- /dev/null +++ b/config/testdata/lextest.output @@ -0,0 +1,56 @@ +1:1 Comment "#" +1:3 CommentValue "Configuration section config." +1:32 EndOfStatement "\n" +2:33 Comment "#" +2:35 CommentValue "How LVM configuration settings are handled." +2:78 EndOfStatement "\n" +3:79 Section "config" +3:86 SectionStart "{" +4:87 EndOfStatement "\n" +4:89 Comment "#" +4:91 CommentValue "This configuration option has an automatic default value." +4:148 EndOfStatement "\n" +5:150 Comment "#" +5:152 CommentValue "checks = 1" +5:162 EndOfStatement "\n" +7:163 EndOfStatement "\n" +7:165 Comment "#" +7:167 CommentValue "Configuration option config/abort_on_errors." +7:211 EndOfStatement "\n" +8:213 Comment "#" +8:215 CommentValue "Abort the LVM process if a configuration mismatch is found." 
+8:274 EndOfStatement "\n" +9:276 Comment "#" +9:278 CommentValue "This configuration option has an automatic default value." +9:335 EndOfStatement "\n" +10:337 Comment "#" +10:339 CommentValue "abort_on_errors = 0" +10:358 EndOfStatement "\n" +12:359 EndOfStatement "\n" +12:361 Identifier "some_field" +12:372 Assignment "=" +12:374 Int64 "1" +12:376 Comment "#" +12:378 CommentValue "This is a comment" +12:395 EndOfStatement "\n" +14:396 EndOfStatement "\n" +14:398 Comment "#" +14:400 CommentValue "Configuration option config/profile_dir." +14:440 EndOfStatement "\n" +15:442 Comment "#" +15:444 CommentValue "Directory where LVM looks for configuration profiles." +15:497 EndOfStatement "\n" +16:499 Comment "#" +16:501 CommentValue "This configuration option has an automatic default value." +16:558 EndOfStatement "\n" +17:560 Comment "#" +17:562 CommentValue "profile_dir = \"/etc/lvm/profile\"" +17:594 EndOfStatement "\n" +19:595 EndOfStatement "\n" +19:597 Identifier "profile_dir" +19:609 Assignment "=" +19:611 String "/my/custom/profile_dir" +19:635 EndOfStatement "\n" +20:636 SectionEnd "}" +21:637 EndOfStatement "\n" +-1:-1 EOF "" diff --git a/config/util.go b/config/util.go new file mode 100644 index 0000000..f93e7d6 --- /dev/null +++ b/config/util.go @@ -0,0 +1,9 @@ +package config + +func isUnstructuredMap(v any) bool { + switch v.(type) { + case map[string]interface{}, *map[string]interface{}: + return true + } + return false +} diff --git a/config_lexer_encoder.go b/config_lexer_encoder.go deleted file mode 100644 index 5dfefd2..0000000 --- a/config_lexer_encoder.go +++ /dev/null @@ -1,307 +0,0 @@ -package lvm2go - -import ( - "bytes" - "fmt" - "io" - "reflect" - "sort" - "strings" -) - -type configLexEncoder struct { - Writer io.Writer -} - -func NewLexingConfigEncoder(writer io.Writer) LexingConfigEncoder { - return &configLexEncoder{ - Writer: writer, - } -} - -func (c *configLexEncoder) Encode(v any) error { - switch v := v.(type) { - case ConfigTokens: - return c.writeTokens(v) - } - if isUnstructuredMap(v) { - return c.EncodeUnstructured(v) - } - return c.EncodeStructured(v) -} - -func (c *configLexEncoder) EncodeStructured(v any) error { - fieldSpecs, err := DecodeLVMStructTagFieldMappings(v) - if err != nil { - return err - } - tokens := StructMappingsToConfigTokens(fieldSpecs) - return c.writeTokens(tokens) -} - -func StructMappingsToConfigTokens(mappings LVMStructTagFieldMappings) ConfigTokens { - fieldSpecsKeyed := make(map[string]map[string]LVMStructTagFieldMapping) - for _, fieldSpec := range mappings { - keyed, ok := fieldSpecsKeyed[fieldSpec.prefix] - if !ok { - fieldSpecsKeyed[fieldSpec.prefix] = make(map[string]LVMStructTagFieldMapping) - keyed = fieldSpecsKeyed[fieldSpec.prefix] - } - keyed[fieldSpec.name] = fieldSpec - } - - line := 1 - tokens := make(ConfigTokens, 0, len(mappings)) - for section, fields := range fieldSpecsKeyed { - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeSection, - Value: []byte(section), - Line: line, - }, &ConfigToken{ - Type: ConfigTokenTypeStartOfSection, - Value: []byte{'{'}, - Line: line, - }, &ConfigToken{ - Type: ConfigTokenTypeEndOfStatement, - Value: []byte{'\n'}, - Line: line, - }) - line++ - - for _, fieldSpec := range fields { - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeIdentifier, - Value: []byte(fieldSpec.name), - Line: line, - }, &ConfigToken{ - Type: ConfigTokenTypeAssignment, - Value: []byte{'='}, - Line: line, - }) - - switch fieldSpec.Kind() { - case reflect.Int64: - tokens = 
append(tokens, &ConfigToken{ - Type: ConfigTokenTypeInt64, - Value: []byte(fmt.Sprintf("%d", fieldSpec.Value.Int())), - Line: line, - }) - default: - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeString, - Value: []byte(fieldSpec.Value.String()), - Line: line, - }) - } - - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeEndOfStatement, - Value: []byte{'\n'}, - Line: line, - }) - line++ - } - - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeEndOfSection, - Value: []byte{'}'}, - Line: line, - }, &ConfigToken{ - Type: ConfigTokenTypeEndOfStatement, - Value: []byte{'\n'}, - Line: line, - }) - line++ - } - - return tokens -} - -func (c *configLexEncoder) EncodeUnstructured(v any) error { - m, ok := v.(map[string]interface{}) - if !ok { - mptr, ok := v.(*map[string]interface{}) - if !ok { - return fmt.Errorf("expected map[string]interface{} or *map[string]interface{}, got %T", v) - } - m = *mptr - } - - sectionKeys := make([]string, 0, len(m)) - fieldKeys := make([]string, 0, len(m)) - mbySection := make(map[string]map[string]interface{}) - for k, v := range m { - splitKey := strings.Split(k, "/") - section, key := splitKey[0], splitKey[1] - - if section == "" { - return fmt.Errorf("expected section prefix in key %q", k) - } - if key == "" { - return fmt.Errorf("expected key suffix in key %q", k) - } - - sectionMap, ok := mbySection[section] - if !ok { - mbySection[section] = make(map[string]interface{}) - sectionMap = mbySection[section] - sectionKeys = append(sectionKeys, section) - } - - sectionMap[key] = v - fieldKeys = append(fieldKeys, key) - } - - // Sort the keys to ensure a deterministic output - sort.Strings(sectionKeys) - sort.Strings(fieldKeys) - - tokens := make(ConfigTokens, 0, len(m)) - - line := 1 - for _, section := range sectionKeys { - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeSection, - Value: []byte(section), - Line: line, - }, &ConfigToken{ - Type: ConfigTokenTypeStartOfSection, - Value: []byte{'{'}, - Line: line, - }, &ConfigToken{ - Type: ConfigTokenTypeEndOfStatement, - Value: []byte{'\n'}, - Line: line, - }) - line++ - for _, key := range fieldKeys { - value := mbySection[section][key] - - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeIdentifier, - Value: []byte(key), - Line: line, - }, &ConfigToken{ - Type: ConfigTokenTypeAssignment, - Value: []byte{'='}, - Line: line, - }) - - switch value := value.(type) { - case int64: - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeInt64, - Value: []byte(fmt.Sprintf("%d", value)), - Line: line, - }) - default: - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeString, - Value: []byte(fmt.Sprintf("%v", value)), - Line: line, - }) - } - - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeEndOfStatement, - Value: []byte{'\n'}, - Line: line, - }) - line++ - } - - tokens = append(tokens, &ConfigToken{ - Type: ConfigTokenTypeEndOfSection, - Value: []byte{'}'}, - Line: line, - }, &ConfigToken{ - Type: ConfigTokenTypeEndOfStatement, - Value: []byte{'\n'}, - Line: line, - }) - line++ - } - - return c.writeTokens(tokens) -} - -func (c *configLexEncoder) writeTokens(tokens ConfigTokens) error { - data, err := ConfigTokensToBytes(tokens) - if err != nil { - return fmt.Errorf("failed to write tokens into byte representation: %w", err) - } - - if _, err := io.Copy(c.Writer, bytes.NewReader(data)); err != nil { - return fmt.Errorf("failed to write tokens: %w", err) - } - return nil -} - -var _ LexingConfigEncoder = &configLexEncoder{} 
-
-func ConfigTokensToBytes(tokens ConfigTokens) ([]byte, error) {
-	// We can estimate a good buffer size by requesting 15% more than the minimum size for all values
-	// This way we are accommodating for the fact that we might need to add spaces or tabs for indentation.
-	expectedSize := int(float32(tokens.minimumSize()) * 1.15)
-	buf := bytes.NewBuffer(make([]byte, 0, expectedSize))
-
-	// writeTabOrSpaceIfInSection writes a tab if the line is in a section and has not been indented yet
-	// otherwise it writes a space
-	// we can use this for correct indentation of the configuration file
-	inSection := false
-	linesIndented := map[int]struct{}{}
-	writeTabOrSpaceIfInSection := func(line int) {
-		if inSection {
-			if _, ok := linesIndented[line]; !ok {
-				buf.WriteRune('\t')
-				linesIndented[line] = struct{}{}
-			} else {
-				buf.WriteRune(' ')
-			}
-		}
-	}
-
-	for _, token := range tokens {
-		switch token.Type {
-		case ConfigTokenTypeComment:
-			writeTabOrSpaceIfInSection(token.Line)
-			buf.Write(token.Value)
-			buf.WriteRune(' ') // readability: Add a space after the comment identifier
-		case ConfigTokenTypeCommentValue:
-			buf.Write(token.Value)
-		case ConfigTokenTypeEndOfStatement:
-			buf.Write(token.Value)
-		case ConfigTokenTypeSection:
-			buf.Write(token.Value)
-			buf.WriteRune(' ') // readability: Add a space after the section identifier
-		case ConfigTokenTypeStartOfSection:
-			buf.Write(token.Value)
-			inSection = true
-		case ConfigTokenTypeEndOfSection:
-			buf.Write(token.Value)
-			inSection = false
-		case ConfigTokenTypeString:
-			buf.WriteRune('"')
-			buf.Write(token.Value)
-			buf.WriteRune('"')
-		case ConfigTokenTypeIdentifier:
-			writeTabOrSpaceIfInSection(token.Line)
-			buf.Write(token.Value)
-			buf.WriteRune(' ') // readability: Add a space after the identifier
-		case ConfigTokenTypeAssignment:
-			buf.Write(token.Value)
-			buf.WriteRune(' ') // readability: Add a space after the assignment
-		case ConfigTokenTypeInt64:
-			buf.Write(token.Value)
-		case ConfigTokenTypeSOF:
-			continue
-		case ConfigTokenTypeEOF:
-			break
-		case ConfigTokenTypeError:
-			return nil, token.Err
-		case configTokenTypeNotYetKnown:
-			return nil, fmt.Errorf("unexpected token type %v", token.Type)
-		}
-	}
-	return buf.Bytes(), nil
-}
diff --git a/config_struct.go b/config_struct.go
deleted file mode 100644
index 2772935..0000000
--- a/config_struct.go
+++ /dev/null
@@ -1,60 +0,0 @@
-package lvm2go
-
-import (
-	"fmt"
-	"reflect"
-)
-
-type LVMStructTagFieldMappings map[string]LVMStructTagFieldMapping
-
-type LVMStructTagFieldMapping struct {
-	prefix string
-	name   string
-	reflect.Value
-}
-
-func (f LVMStructTagFieldMapping) String() string {
-	switch f.Kind() {
-	case reflect.Int64:
-		return fmt.Sprintf("%s = %d", f.name, f.Int())
-	default:
-		return fmt.Sprintf("%s = %q", f.name, f.Value.String())
-	}
-}
-
-func DecodeLVMStructTagFieldMappings(v any) (map[string]LVMStructTagFieldMapping, error) {
-	fields, typeAccessor, valueAccessor, err := accessStructOrPointerToStruct(v)
-	if err != nil {
-		return nil, err
-	}
-
-	tagOrIgnore := func(tag reflect.StructTag) (string, bool) {
-		return tag.Get(LVMConfigStructTag), tag.Get(LVMConfigStructTag) == "-"
-	}
-
-	fieldSpecs := make(map[string]LVMStructTagFieldMapping)
-	for i := range fields {
-		outerField := typeAccessor(i)
-		prefix, ignore := tagOrIgnore(outerField.Tag)
-		if ignore {
-			continue
-		}
-		fields, typeAccessor, valueAccessor, err := accessStructOrPointerToStruct(valueAccessor(i))
-		if err != nil {
-			return nil, err
-		}
-		for j := range fields {
-			innerField :=
typeAccessor(j)
-			name, ignore := tagOrIgnore(innerField.Tag)
-			if ignore {
-				continue
-			}
-			fieldSpecs[name] = LVMStructTagFieldMapping{
-				prefix,
-				name,
-				valueAccessor(j),
-			}
-		}
-	}
-	return fieldSpecs, nil
-}
diff --git a/config_test.go b/config_test.go
index 0ac6842..50da777 100644
--- a/config_test.go
+++ b/config_test.go
@@ -28,6 +28,7 @@ import (
 	"testing"
 
 	. "github.com/jakobmoellerdev/lvm2go"
+	. "github.com/jakobmoellerdev/lvm2go/config"
 )
 
 func Test_RawConfig(t *testing.T) {
diff --git a/profile.go b/profile.go
index 291a233..d06a50a 100644
--- a/profile.go
+++ b/profile.go
@@ -20,9 +20,11 @@ import (
 	"fmt"
 	"path/filepath"
 	"strings"
+
+	"github.com/jakobmoellerdev/lvm2go/config"
 )
 
-var ErrInvalidProfileExtension = fmt.Errorf("profile extension must be empty or %q", LVMProfileExtension)
+var ErrInvalidProfileExtension = fmt.Errorf("profile extension must be empty or %q", config.LVMProfileExtension)
 
 type Profile string
 
@@ -31,7 +33,7 @@ func (opt Profile) ApplyToArgs(args Arguments) error {
 		return nil
 	}
 	ext := filepath.Ext(string(opt))
-	if ext != "" && ext != LVMProfileExtension {
+	if ext != "" && ext != config.LVMProfileExtension {
 		return ErrInvalidProfileExtension
 	}
 
diff --git a/reflect.go b/reflect/reflect.go
similarity index 94%
rename from reflect.go
rename to reflect/reflect.go
index 19bf8c2..f6c464c 100644
--- a/reflect.go
+++ b/reflect/reflect.go
@@ -14,14 +14,14 @@ limitations under the License.
 */
 
-package lvm2go
+package reflect
 
 import (
 	"fmt"
 	"reflect"
 )
 
-// accessStructOrPointerToStruct returns the number of fields in the struct,
+// AccessStructOrPointerToStruct returns the number of fields in the struct,
 // a function to access the fields, and a function to access the values of the fields.
 // If the value is a pointer, its reference will be used or initialized if it is nil.
 // If the value is not a struct or a pointer to a struct, an error will be returned.
@@ -30,7 +30,7 @@ import (
 // The fieldAccessor and valueAccessor functions are safe to use in a loop and will not panic for idx < fieldNum.
 // The valueAccessor function will dereference pointers if necessary and initialize them if they are nil.
 // The valueAccessor function will panic if idx >= fieldNum.
-func accessStructOrPointerToStruct(v interface{}) (
+func AccessStructOrPointerToStruct(v interface{}) (
 	fieldNum int,
 	fieldAccessor func(idx int) reflect.StructField,
 	valueAccessor func(idx int) reflect.Value,
diff --git a/build_id.go b/util/build_id.go
similarity index 97%
rename from build_id.go
rename to util/build_id.go
index 03efa6f..a91e650 100644
--- a/build_id.go
+++ b/util/build_id.go
@@ -1,4 +1,4 @@
-package lvm2go
+package util
 
 import (
 	"fmt"
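Reviewer note: taken together, the config_struct.go removal and the reflect.go move mean struct-tag decoding now composes config.DecodeFieldMappings with the exported reflect helper. A minimal sketch of the two-level `lvm` tag walk under stated assumptions: the sample types and the lvmreflect import alias are hypothetical, and only the signature of AccessStructOrPointerToStruct is taken from this diff:

	package main

	import (
		"fmt"

		lvmreflect "github.com/jakobmoellerdev/lvm2go/reflect"
	)

	// Hypothetical config fragment: the outer `lvm` tag names the section,
	// the inner `lvm` tag names the key, mirroring the deleted
	// DecodeLVMStructTagFieldMappings (now config.DecodeFieldMappings).
	type ActivationConfig struct {
		RaidFaultPolicy string `lvm:"raid_fault_policy"`
	}

	type LVMConfig struct {
		Activation ActivationConfig `lvm:"activation"`
	}

	func main() {
		cfg := LVMConfig{Activation: ActivationConfig{RaidFaultPolicy: "warn"}}

		// Works on a struct or a pointer to one; pointers are dereferenced
		// (and initialized if nil) by the helper.
		fieldNum, fieldAccessor, valueAccessor, err := lvmreflect.AccessStructOrPointerToStruct(&cfg)
		if err != nil {
			panic(err)
		}
		for i := 0; i < fieldNum; i++ {
			section := fieldAccessor(i).Tag.Get("lvm") // outer tag = section prefix
			fmt.Printf("section %q -> %v\n", section, valueAccessor(i))
		}
		// Prints: section "activation" -> {warn}
	}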