diff --git a/from_tag_test.go b/from_tag_test.go
new file mode 100644
index 0000000..e06cef9
--- /dev/null
+++ b/from_tag_test.go
@@ -0,0 +1,194 @@
+package twig
+
+import (
+ "strings"
+ "testing"
+)
+
+// TestFromTagBasic tests the most basic from tag use case
+func TestFromTagBasic(t *testing.T) {
+ engine := New()
+
+ // Macro library template with a simple macro
+ macroLib := `{% macro hello(name) %}Hello, {{ name }}!{% endmacro %}`
+
+ // Simple template using the from tag
+ mainTemplate := `{% from "macros.twig" import hello %}
+{{ hello('World') }}`
+
+ // Register templates
+ err := engine.RegisterString("macros.twig", macroLib)
+ if err != nil {
+ t.Fatalf("Error registering macros.twig: %v", err)
+ }
+
+ err = engine.RegisterString("main.twig", mainTemplate)
+ if err != nil {
+ t.Fatalf("Error registering main.twig: %v", err)
+ }
+
+ // Render the template
+ result, err := engine.Render("main.twig", nil)
+ if err != nil {
+ t.Fatalf("Error rendering template: %v", err)
+ }
+
+ // Check the output
+ expected := "Hello, World!"
+ if !strings.Contains(result, expected) {
+ t.Errorf("Expected %q in result, but got: %s", expected, result)
+ }
+}
+
+// TestFromTagWithAlias tests the from tag with an alias
+func TestFromTagWithAlias(t *testing.T) {
+ engine := New()
+
+ // Macro library template
+ macroLib := `{% macro greet(name) %}Hello, {{ name }}!{% endmacro %}
+{% macro farewell(name) %}Goodbye, {{ name }}!{% endmacro %}`
+
+ // Template using from import with aliases
+ template := `{% from "macros.twig" import greet as hello, farewell as bye %}
+{{ hello('John') }}
+{{ bye('Jane') }}`
+
+ // Register templates
+ err := engine.RegisterString("macros.twig", macroLib)
+ if err != nil {
+ t.Fatalf("Error registering macros.twig: %v", err)
+ }
+
+ err = engine.RegisterString("template.twig", template)
+ if err != nil {
+ t.Fatalf("Error registering template.twig: %v", err)
+ }
+
+ // Render the template
+ result, err := engine.Render("template.twig", nil)
+ if err != nil {
+ t.Fatalf("Error rendering template: %v", err)
+ }
+
+ // Check the output
+ expectedHello := "Hello, John!"
+ expectedBye := "Goodbye, Jane!"
+
+ if !strings.Contains(result, expectedHello) {
+ t.Errorf("Expected %q in result, but got: %s", expectedHello, result)
+ }
+
+ if !strings.Contains(result, expectedBye) {
+ t.Errorf("Expected %q in result, but got: %s", expectedBye, result)
+ }
+}
+
+// TestFromTagMultipleImports tests importing multiple macros from a template
+func TestFromTagMultipleImports(t *testing.T) {
+ engine := New()
+
+ // Macro library template with multiple macros
+ macroLib := `{% macro input(name, value) %}
+
+{% endmacro %}
+
+{% macro label(text) %}
+
+{% endmacro %}
+
+{% macro button(text) %}
+
+{% endmacro %}`
+
+ // Template importing multiple macros
+ template := `{% from "form_macros.twig" import input, label, button %}
+
`
+
+ // Register templates
+ err := engine.RegisterString("form_macros.twig", macroLib)
+ if err != nil {
+ t.Fatalf("Error registering form_macros.twig: %v", err)
+ }
+
+ err = engine.RegisterString("form.twig", template)
+ if err != nil {
+ t.Fatalf("Error registering form.twig: %v", err)
+ }
+
+ // Render the template
+ result, err := engine.Render("form.twig", nil)
+ if err != nil {
+ t.Fatalf("Error rendering template: %v", err)
+ }
+
+ // Check the output
+ expectedElements := []string{
+ ``,
+ ``,
+ ``,
+ }
+
+ for _, expected := range expectedElements {
+ if !strings.Contains(result, expected) {
+ t.Errorf("Expected %q in result, but got: %s", expected, result)
+ }
+ }
+}
+
+// TestFromTagMixedAliases tests importing some macros with aliases and some without
+func TestFromTagMixedAliases(t *testing.T) {
+ engine := New()
+
+ // Macro library template
+ macroLib := `{% macro header(text) %}
+{{ text }}
+{% endmacro %}
+
+{% macro paragraph(text) %}
+{{ text }}
+{% endmacro %}
+
+{% macro link(href, text) %}
+{{ text }}
+{% endmacro %}`
+
+ // Template with mixed alias usage
+ template := `{% from "content_macros.twig" import header, paragraph as p, link as a %}
+{{ header('Title') }}
+{{ p('This is a paragraph.') }}
+{{ a('#', 'Click here') }}`
+
+ // Register templates
+ err := engine.RegisterString("content_macros.twig", macroLib)
+ if err != nil {
+ t.Fatalf("Error registering content_macros.twig: %v", err)
+ }
+
+ err = engine.RegisterString("content.twig", template)
+ if err != nil {
+ t.Fatalf("Error registering content.twig: %v", err)
+ }
+
+ // Render the template
+ result, err := engine.Render("content.twig", nil)
+ if err != nil {
+ t.Fatalf("Error rendering template: %v", err)
+ }
+
+ // Check the output
+ expectedElements := []string{
+ `Title
`,
+ `This is a paragraph.
`,
+ `Click here`,
+ }
+
+ for _, expected := range expectedElements {
+ if !strings.Contains(result, expected) {
+ t.Errorf("Expected %q in result, but got: %s", expected, result)
+ }
+ }
+}
diff --git a/macros_test.go b/macros_test.go
index 46dbe7d..5b9fcec 100644
--- a/macros_test.go
+++ b/macros_test.go
@@ -125,7 +125,66 @@ func TestMacrosImport(t *testing.T) {
}
}
-// TestMacrosFromImport tests selective importing macros
+// TestMacrosImportAs tests importing macros using the import as syntax
+func TestMacrosImportAs(t *testing.T) {
+ engine := New()
+
+ // Macro library template
+ macroLib := `
+ {% macro input(name, value = '', type = 'text') %}
+
+ {% endmacro %}
+
+ {% macro textarea(name, value = '') %}
+
+ {% endmacro %}
+
+ {% macro button(name, value) %}
+
+ {% endmacro %}
+ `
+
+ // Main template that imports macros using import as syntax
+ mainTemplate := `
+ {% import "macro_lib.twig" as lib %}
+
+
+ `
+
+ // Register both templates
+ err := engine.RegisterString("macro_lib.twig", macroLib)
+ if err != nil {
+ t.Fatalf("Error registering macro_lib.twig: %v", err)
+ }
+
+ err = engine.RegisterString("import_as.twig", mainTemplate)
+ if err != nil {
+ t.Fatalf("Error registering import_as.twig: %v", err)
+ }
+
+ // Render the main template
+ result, err := engine.Render("import_as.twig", nil)
+ if err != nil {
+ t.Fatalf("Error parsing/rendering template: %v", err)
+ }
+
+ // Check the output
+ expectedHtml := []string{
+ ``,
+ ``,
+ }
+
+ for _, expected := range expectedHtml {
+ if !strings.Contains(result, expected) {
+ t.Errorf("Expected %q in result, but got: %s", expected, result)
+ }
+ }
+}
+
+// TestMacrosFromImport tests selectively importing macros using the from import syntax
func TestMacrosFromImport(t *testing.T) {
engine := New()
@@ -144,20 +203,25 @@ func TestMacrosFromImport(t *testing.T) {
{% endmacro %}
`
- // Main template that selectively imports macros
- // Using import as syntax which has better support
- mainTemplate := `
- {% import "macro_lib.twig" as lib %}
+ // Main template that selectively imports macros using from import syntax
+ mainTemplate := `{% from "macro_lib.twig" import input, button %}
`
// Register both templates
- engine.RegisterString("macro_lib.twig", macroLib)
- engine.RegisterString("from_import.twig", mainTemplate)
+ err := engine.RegisterString("macro_lib.twig", macroLib)
+ if err != nil {
+ t.Fatalf("Error registering macro_lib.twig: %v", err)
+ }
+
+ err = engine.RegisterString("from_import.twig", mainTemplate)
+ if err != nil {
+ t.Fatalf("Error registering from_import.twig: %v", err)
+ }
// Render the main template
result, err := engine.Render("from_import.twig", nil)
@@ -178,6 +242,137 @@ func TestMacrosFromImport(t *testing.T) {
}
}
+// TestMacrosFromImportWithAliases tests importing macros with aliases using the from import syntax
+func TestMacrosFromImportWithAliases(t *testing.T) {
+ engine := New()
+
+ // Macro library template
+ macroLib := `
+ {% macro input(name, value = '', type = 'text') %}
+
+ {% endmacro %}
+
+ {% macro textarea(name, value = '') %}
+
+ {% endmacro %}
+
+ {% macro button(name, value) %}
+
+ {% endmacro %}
+ `
+
+ // Main template that imports macros with aliases using from import syntax
+ mainTemplate := `{% from "macro_lib.twig" import input as field, button as btn %}
+
+
+ `
+
+ // Register both templates
+ err := engine.RegisterString("macro_lib.twig", macroLib)
+ if err != nil {
+ t.Fatalf("Error registering macro_lib.twig: %v", err)
+ }
+
+ err = engine.RegisterString("from_import_aliases.twig", mainTemplate)
+ if err != nil {
+ t.Fatalf("Error registering from_import_aliases.twig: %v", err)
+ }
+
+ // Render the main template
+ result, err := engine.Render("from_import_aliases.twig", nil)
+ if err != nil {
+ t.Fatalf("Error parsing/rendering template: %v", err)
+ }
+
+ // Check the output
+ expectedHtml := []string{
+ ``,
+ ``,
+ }
+
+ for _, expected := range expectedHtml {
+ if !strings.Contains(result, expected) {
+ t.Errorf("Expected %q in result, but got: %s", expected, result)
+ }
+ }
+}
+
+// TestMixedImportApproaches tests using both import and from import syntax in the same template
+func TestMixedImportApproaches(t *testing.T) {
+ engine := New()
+
+ // First macro library template
+ formsMacroLib := `
+ {% macro input(name, value = '') %}
+
+ {% endmacro %}
+
+ {% macro bold(text) %}
+ {{ text }}
+ {% endmacro %}
+ `
+
+ // Second macro library template
+ layoutMacroLib := `
+ {% macro header(text) %}
+ {{ text }}
+ {% endmacro %}
+
+ {% macro box(content) %}
+ {{ content }}
+ {% endmacro %}
+ `
+
+ // Main template that uses both import approaches
+ mainTemplate := `{% import "forms_macros.twig" as forms %}
+{% from "layout_macros.twig" import header %}
+
+
+ {{ header('Hello') }}
+ {{ forms.input('username', 'john') }}
+ {{ forms.bold('Welcome') }}
+
+ `
+
+ // Register templates
+ err := engine.RegisterString("forms_macros.twig", formsMacroLib)
+ if err != nil {
+ t.Fatalf("Error registering forms_macros.twig: %v", err)
+ }
+
+ err = engine.RegisterString("layout_macros.twig", layoutMacroLib)
+ if err != nil {
+ t.Fatalf("Error registering layout_macros.twig: %v", err)
+ }
+
+ err = engine.RegisterString("mixed_imports.twig", mainTemplate)
+ if err != nil {
+ t.Fatalf("Error registering mixed_imports.twig: %v", err)
+ }
+
+ // Render the main template
+ result, err := engine.Render("mixed_imports.twig", nil)
+ if err != nil {
+ t.Fatalf("Error parsing/rendering template: %v", err)
+ }
+
+ // Check the output
+ expectedElements := []string{
+ `Hello
`,
+ ``,
+ `Welcome`,
+ }
+
+ for _, expected := range expectedElements {
+ if !strings.Contains(result, expected) {
+ t.Errorf("Expected %q in result, but got: %s", expected, result)
+ }
+ }
+}
+
// TestMacrosWithContext tests macros with context variables
func TestMacrosWithContext(t *testing.T) {
engine := New()
diff --git a/parse_block.go b/parse_block.go
new file mode 100644
index 0000000..04e7da3
--- /dev/null
+++ b/parse_block.go
@@ -0,0 +1,65 @@
+package twig
+
+import "fmt"
+
+func (p *Parser) parseBlock(parser *Parser) (Node, error) {
+ // Get the line number of the block token
+ blockLine := parser.tokens[parser.tokenIndex-2].Line
+
+ // Get the block name
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
+ return nil, fmt.Errorf("expected block name at line %d", blockLine)
+ }
+
+ blockName := parser.tokens[parser.tokenIndex].Value
+ parser.tokenIndex++
+
+ // Expect the block end token
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
+ return nil, fmt.Errorf("expected block end token after block name at line %d", blockLine)
+ }
+ parser.tokenIndex++
+
+ // Parse the block body
+ blockBody, err := parser.parseOuterTemplate()
+ if err != nil {
+ return nil, err
+ }
+
+ // Expect endblock tag
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START {
+ return nil, fmt.Errorf("expected endblock tag at line %d", blockLine)
+ }
+ parser.tokenIndex++
+
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME ||
+ parser.tokens[parser.tokenIndex].Value != "endblock" {
+ return nil, fmt.Errorf("expected endblock at line %d", parser.tokens[parser.tokenIndex-1].Line)
+ }
+ parser.tokenIndex++
+
+ // Check for optional block name in endblock
+ if parser.tokenIndex < len(parser.tokens) && parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
+ endBlockName := parser.tokens[parser.tokenIndex].Value
+ if endBlockName != blockName {
+ return nil, fmt.Errorf("mismatched block name, expected %s but got %s at line %d",
+ blockName, endBlockName, parser.tokens[parser.tokenIndex].Line)
+ }
+ parser.tokenIndex++
+ }
+
+ // Expect the final block end token
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
+ return nil, fmt.Errorf("expected block end token after endblock at line %d", parser.tokens[parser.tokenIndex-1].Line)
+ }
+ parser.tokenIndex++
+
+ // Create the block node
+ blockNode := &BlockNode{
+ name: blockName,
+ body: blockBody,
+ line: blockLine,
+ }
+
+ return blockNode, nil
+}
diff --git a/parse_do.go b/parse_do.go
new file mode 100644
index 0000000..be2b8bf
--- /dev/null
+++ b/parse_do.go
@@ -0,0 +1,70 @@
+package twig
+
+import (
+ "fmt"
+ "strconv"
+)
+
+func (p *Parser) parseDo(parser *Parser) (Node, error) {
+ // Get the line number for error reporting
+ doLine := parser.tokens[parser.tokenIndex-2].Line
+
+ // Check for special case: assignment expressions
+ // These need to be handled specially since they're not normal expressions
+ if parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
+
+ varName := parser.tokens[parser.tokenIndex].Value
+ parser.tokenIndex++
+
+ if parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_OPERATOR &&
+ parser.tokens[parser.tokenIndex].Value == "=" {
+
+ // Skip the equals sign
+ parser.tokenIndex++
+
+ // Parse the right side expression
+ expr, err := parser.parseExpression()
+ if err != nil {
+ return nil, fmt.Errorf("error parsing expression in do assignment at line %d: %w", doLine, err)
+ }
+
+ // Make sure we have the closing tag
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
+ return nil, fmt.Errorf("expecting end of do tag at line %d", doLine)
+ }
+ parser.tokenIndex++
+
+ // Validate the variable name - it should not be a numeric literal
+ if _, err := strconv.Atoi(varName); err == nil {
+ return nil, fmt.Errorf("invalid variable name %q in do tag assignment at line %d", varName, doLine)
+ }
+
+ // Create a SetNode instead of DoNode for assignments
+ return &SetNode{
+ name: varName,
+ value: expr,
+ line: doLine,
+ }, nil
+ }
+
+ // If it wasn't an assignment, backtrack to parse it as a normal expression
+ parser.tokenIndex -= 1
+ }
+
+ // Parse the expression to be executed
+ expr, err := parser.parseExpression()
+ if err != nil {
+ return nil, fmt.Errorf("error parsing expression in do tag at line %d: %w", doLine, err)
+ }
+
+ // Make sure we have the closing tag
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
+ return nil, fmt.Errorf("expecting end of do tag at line %d", doLine)
+ }
+ parser.tokenIndex++
+
+ // Create and return the DoNode
+ return NewDoNode(expr, doLine), nil
+}
diff --git a/parse_extends.go b/parse_extends.go
new file mode 100644
index 0000000..4e4a379
--- /dev/null
+++ b/parse_extends.go
@@ -0,0 +1,28 @@
+package twig
+
+import "fmt"
+
+func (p *Parser) parseExtends(parser *Parser) (Node, error) {
+ // Get the line number of the extends token
+ extendsLine := parser.tokens[parser.tokenIndex-2].Line
+
+ // Get the parent template expression
+ parentExpr, err := parser.parseExpression()
+ if err != nil {
+ return nil, err
+ }
+
+ // Expect the block end token
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
+ return nil, fmt.Errorf("expected block end token after extends at line %d", extendsLine)
+ }
+ parser.tokenIndex++
+
+ // Create the extends node
+ extendsNode := &ExtendsNode{
+ parent: parentExpr,
+ line: extendsLine,
+ }
+
+ return extendsNode, nil
+}
diff --git a/parse_for.go b/parse_for.go
new file mode 100644
index 0000000..c87eaca
--- /dev/null
+++ b/parse_for.go
@@ -0,0 +1,143 @@
+package twig
+
+import (
+ "fmt"
+)
+
+// parseFor parses a for loop construct in Twig templates
+// Examples:
+// {% for item in items %}...{% endfor %}
+// {% for key, value in items %}...{% endfor %}
+// {% for item in items %}...{% else %}...{% endfor %}
+func (p *Parser) parseFor(parser *Parser) (Node, error) {
+ // Get the line number of the for token
+ forLine := parser.tokens[parser.tokenIndex-2].Line
+
+ // Parse the loop variable name(s)
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
+ return nil, fmt.Errorf("expected variable name after for at line %d", forLine)
+ }
+
+ // Get value variable name
+ valueVar := parser.tokens[parser.tokenIndex].Value
+ parser.tokenIndex++
+
+ var keyVar string
+
+ // Check for key, value syntax
+ if parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
+ parser.tokens[parser.tokenIndex].Value == "," {
+
+ // Move past the comma
+ parser.tokenIndex++
+
+ // Now valueVar is actually the key, and we need to get the value
+ keyVar = valueVar
+
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
+ return nil, fmt.Errorf("expected value variable name after comma at line %d", forLine)
+ }
+
+ valueVar = parser.tokens[parser.tokenIndex].Value
+ parser.tokenIndex++
+ }
+
+ // Expect 'in' keyword
+ if parser.tokenIndex >= len(parser.tokens) ||
+ parser.tokens[parser.tokenIndex].Type != TOKEN_NAME ||
+ parser.tokens[parser.tokenIndex].Value != "in" {
+ return nil, fmt.Errorf("expected 'in' keyword after variable name at line %d", forLine)
+ }
+ parser.tokenIndex++
+
+ // Parse the sequence expression
+ sequence, err := parser.parseExpression()
+ if err != nil {
+ return nil, err
+ }
+
+ // Debug logging only: record the concrete type of the parsed sequence expression
+ if IsDebugEnabled() {
+ LogDebug("For loop sequence expression type: %T", sequence)
+ }
+
+ // Expect the block end token (either regular or trim variant)
+ if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
+ return nil, fmt.Errorf("expected block end after for statement at line %d", forLine)
+ }
+ parser.tokenIndex++
+
+ // Parse the for loop body
+ loopBody, err := parser.parseOuterTemplate()
+ if err != nil {
+ return nil, err
+ }
+
+ var elseBody []Node
+
+ // Check for else or endfor
+ if parser.tokenIndex < len(parser.tokens) && parser.tokens[parser.tokenIndex].Type == TOKEN_BLOCK_START {
+ parser.tokenIndex++
+
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
+ return nil, fmt.Errorf("expected block name at line %d", parser.tokens[parser.tokenIndex-1].Line)
+ }
+
+ // Check if this is an else block
+ if parser.tokens[parser.tokenIndex].Value == "else" {
+ parser.tokenIndex++
+
+ // Expect the block end token
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
+ return nil, fmt.Errorf("expected block end after else at line %d", parser.tokens[parser.tokenIndex-1].Line)
+ }
+ parser.tokenIndex++
+
+ // Parse the else body
+ elseBody, err = parser.parseOuterTemplate()
+ if err != nil {
+ return nil, err
+ }
+
+ // Now expect the endfor
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START {
+ return nil, fmt.Errorf("expected endfor block at line %d", parser.tokens[parser.tokenIndex-1].Line)
+ }
+ parser.tokenIndex++
+
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
+ return nil, fmt.Errorf("expected endfor at line %d", parser.tokens[parser.tokenIndex-1].Line)
+ }
+
+ if parser.tokens[parser.tokenIndex].Value != "endfor" {
+ return nil, fmt.Errorf("expected endfor, got %s at line %d", parser.tokens[parser.tokenIndex].Value, parser.tokens[parser.tokenIndex].Line)
+ }
+ parser.tokenIndex++
+ } else if parser.tokens[parser.tokenIndex].Value == "endfor" {
+ parser.tokenIndex++
+ } else {
+ return nil, fmt.Errorf("expected else or endfor, got %s at line %d", parser.tokens[parser.tokenIndex].Value, parser.tokens[parser.tokenIndex].Line)
+ }
+
+ // Expect the final block end token
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
+ return nil, fmt.Errorf("expected block end after endfor at line %d", parser.tokens[parser.tokenIndex-1].Line)
+ }
+ parser.tokenIndex++
+ } else {
+ return nil, fmt.Errorf("unexpected end of template, expected endfor at line %d", forLine)
+ }
+
+ // Create the for node
+ forNode := &ForNode{
+ keyVar: keyVar,
+ valueVar: valueVar,
+ sequence: sequence,
+ body: loopBody,
+ elseBranch: elseBody,
+ line: forLine,
+ }
+
+ return forNode, nil
+}
diff --git a/parse_from.go b/parse_from.go
new file mode 100644
index 0000000..51d2bef
--- /dev/null
+++ b/parse_from.go
@@ -0,0 +1,83 @@
+package twig
+
+import (
+ "fmt"
+ "strings"
+)
+
+// parseFrom handles the from tag which imports macros from another template
+// Example: {% from "macros.twig" import input, button %}
+// Example: {% from "macros.twig" import input as field, button as btn %}
+func (p *Parser) parseFrom(parser *Parser) (Node, error) {
+ // Get the line number of the from token. NOTE(review): this uses tokenIndex-1, whereas the sibling tag parsers (parseBlock, parseDo, parseFor, parseIf, parseImport, parseMacro) all use tokenIndex-2 — confirm the offset is intentional.
+ fromLine := parser.tokens[parser.tokenIndex-1].Line
+
+ // We need to manually extract the template path, import keyword, and macro(s) from
+ // the current token. The tokenizer seems to be combining them.
+ if parser.tokenIndex < len(parser.tokens) && parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
+ // Extract parts from the combined token value
+ tokenValue := parser.tokens[parser.tokenIndex].Value
+
+ // Try to extract template path and remaining parts
+ matches := strings.Split(tokenValue, " import ")
+ if len(matches) == 2 {
+ // We found the import keyword in the token value
+ templatePath := strings.TrimSpace(matches[0])
+ // Remove quotes if present
+ templatePath = strings.Trim(templatePath, "\"'")
+ macrosList := strings.TrimSpace(matches[1])
+
+ // Create template expression
+ templateExpr := &LiteralNode{
+ ExpressionNode: ExpressionNode{
+ exprType: ExprLiteral,
+ line: fromLine,
+ },
+ value: templatePath,
+ }
+
+ // Parse macros list
+ macros := []string{}
+ aliases := map[string]string{}
+
+ // Split macros by comma if multiple
+ macroItems := strings.Split(macrosList, ",")
+ for _, item := range macroItems {
+ item = strings.TrimSpace(item)
+
+ // Check for "as" alias
+ asParts := strings.Split(item, " as ")
+ if len(asParts) == 2 {
+ // We have an alias
+ macroName := strings.TrimSpace(asParts[0])
+ aliasName := strings.TrimSpace(asParts[1])
+ aliases[macroName] = aliasName
+ // Still add the macro name to macros list, even with alias
+ macros = append(macros, macroName)
+ } else {
+ // No alias
+ macros = append(macros, item)
+ }
+ }
+
+ // Skip the current token
+ parser.tokenIndex++
+
+ // Skip to the block end token
+ for parser.tokenIndex < len(parser.tokens) {
+ if parser.tokens[parser.tokenIndex].Type == TOKEN_BLOCK_END ||
+ parser.tokens[parser.tokenIndex].Type == TOKEN_BLOCK_END_TRIM {
+ parser.tokenIndex++
+ break
+ }
+ parser.tokenIndex++
+ }
+
+ // Create and return the FromImportNode
+ return NewFromImportNode(templateExpr, macros, aliases, fromLine), nil
+ }
+ }
+
+ // If we're here, the standard parsing approach failed, so return an error
+ return nil, fmt.Errorf("expected 'import' after template path at line %d", fromLine)
+}
diff --git a/parse_if.go b/parse_if.go
new file mode 100644
index 0000000..b903c83
--- /dev/null
+++ b/parse_if.go
@@ -0,0 +1,147 @@
+package twig
+
+import (
+ "fmt"
+)
+
+// parseIf parses if/elseif/else/endif block structure
+// Examples:
+// {% if condition %}...{% endif %}
+// {% if condition %}...{% else %}...{% endif %}
+// {% if condition %}...{% elseif condition2 %}...{% else %}...{% endif %}
+func (p *Parser) parseIf(parser *Parser) (Node, error) {
+ // Get the line number of the if token
+ ifLine := parser.tokens[parser.tokenIndex-2].Line
+
+ // Parse the condition expression
+ condition, err := parser.parseExpression()
+ if err != nil {
+ return nil, err
+ }
+
+ // Expect the block end token (either regular or whitespace-trimming variant)
+ if parser.tokenIndex >= len(parser.tokens) ||
+ (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
+ parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
+ return nil, fmt.Errorf("expected block end after if condition at line %d", ifLine)
+ }
+ parser.tokenIndex++
+
+ // Parse the if body (statements between if and endif/else/elseif)
+ ifBody, err := parser.parseOuterTemplate()
+ if err != nil {
+ return nil, err
+ }
+
+ // Initialize conditions and bodies arrays with the initial if condition and body
+ conditions := []Node{condition}
+ bodies := [][]Node{ifBody}
+ var elseBody []Node
+
+ // Keep track of whether we've seen an else block
+ var hasElseBlock bool
+
+ // Process subsequent tags (elseif, else, endif)
+ for {
+ // We expect a block start token for elseif, else, or endif
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START {
+ return nil, fmt.Errorf("unexpected end of template, expected endif at line %d", ifLine)
+ }
+ parser.tokenIndex++
+
+ // We expect a name token (elseif, else, or endif)
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
+ return nil, fmt.Errorf("expected block name at line %d", parser.tokens[parser.tokenIndex-1].Line)
+ }
+
+ // Get the tag name
+ blockName := parser.tokens[parser.tokenIndex].Value
+ blockLine := parser.tokens[parser.tokenIndex].Line
+ parser.tokenIndex++
+
+ // Process based on the tag type
+ if blockName == "elseif" {
+ // Check if we've already seen an else block - elseif can't come after else
+ if hasElseBlock {
+ return nil, fmt.Errorf("unexpected elseif after else at line %d", blockLine)
+ }
+
+ // Handle elseif condition
+ elseifCondition, err := parser.parseExpression()
+ if err != nil {
+ return nil, err
+ }
+
+ // Expect block end token
+ if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
+ return nil, fmt.Errorf("expected block end after elseif condition at line %d", blockLine)
+ }
+ parser.tokenIndex++
+
+ // Parse the elseif body
+ elseifBody, err := parser.parseOuterTemplate()
+ if err != nil {
+ return nil, err
+ }
+
+ // Add condition and body to our arrays
+ conditions = append(conditions, elseifCondition)
+ bodies = append(bodies, elseifBody)
+
+ // Continue checking for more elseif/else/endif tags
+ } else if blockName == "else" {
+ // Check if we've already seen an else block - can't have multiple else blocks
+ if hasElseBlock {
+ return nil, fmt.Errorf("multiple else blocks found at line %d", blockLine)
+ }
+
+ // Mark that we've seen an else block
+ hasElseBlock = true
+
+ // Expect block end token
+ if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
+ return nil, fmt.Errorf("expected block end after else tag at line %d", blockLine)
+ }
+ parser.tokenIndex++
+
+ // Parse the else body
+ elseBody, err = parser.parseOuterTemplate()
+ if err != nil {
+ return nil, err
+ }
+
+ // After else, we need to find endif next (handled in next iteration)
+ } else if blockName == "endif" {
+ // Expect block end token
+ if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
+ return nil, fmt.Errorf("expected block end after endif at line %d", blockLine)
+ }
+ parser.tokenIndex++
+
+ // We found the endif, we're done
+ break
+ } else {
+ return nil, fmt.Errorf("expected elseif, else, or endif, got %s at line %d", blockName, blockLine)
+ }
+ }
+
+ // Create and return the if node
+ ifNode := &IfNode{
+ conditions: conditions,
+ bodies: bodies,
+ elseBranch: elseBody,
+ line: ifLine,
+ }
+
+ return ifNode, nil
+}
+
+// Helper function to check if a token type is a block end token
+func isBlockEndToken(tokenType int) bool {
+ return tokenType == TOKEN_BLOCK_END || tokenType == TOKEN_BLOCK_END_TRIM
+}
+
+// Helper function to check if a token is any kind of variable end token (regular or trim variant)
+func isVarEndToken(tokenType int) bool {
+ return tokenType == TOKEN_VAR_END || tokenType == TOKEN_VAR_END_TRIM
+}
diff --git a/parse_import.go b/parse_import.go
new file mode 100644
index 0000000..8e1b4bf
--- /dev/null
+++ b/parse_import.go
@@ -0,0 +1,96 @@
+package twig
+
+import (
+ "fmt"
+ "strings"
+)
+
+func (p *Parser) parseImport(parser *Parser) (Node, error) {
+ // Use debug logging if enabled
+ if IsDebugEnabled() && debugger.level >= DebugVerbose {
+ tokenIndex := parser.tokenIndex - 2
+ LogVerbose("Parsing import, tokens available:")
+ for i := 0; i < 10 && tokenIndex+i < len(parser.tokens); i++ {
+ token := parser.tokens[tokenIndex+i]
+ LogVerbose(" Token %d: Type=%d, Value=%q, Line=%d", i, token.Type, token.Value, token.Line)
+ }
+ }
+
+ // Get the line number of the import token
+ importLine := parser.tokens[parser.tokenIndex-2].Line
+
+ // Check for incorrectly tokenized import syntax
+ if parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_NAME &&
+ strings.Contains(parser.tokens[parser.tokenIndex].Value, " as ") {
+
+ // Special handling for combined syntax like "path.twig as alias"
+ parts := strings.SplitN(parser.tokens[parser.tokenIndex].Value, " as ", 2)
+ if len(parts) == 2 {
+ templatePath := strings.TrimSpace(parts[0])
+ alias := strings.TrimSpace(parts[1])
+
+ if IsDebugEnabled() && debugger.level >= DebugVerbose {
+ LogVerbose("Found combined import syntax: template=%q, alias=%q", templatePath, alias)
+ }
+
+ // Create an expression node for the template path
+ var templateExpr Node
+ if strings.HasPrefix(templatePath, "\"") && strings.HasSuffix(templatePath, "\"") {
+ // It's already a quoted string
+ templateExpr = NewLiteralNode(templatePath[1:len(templatePath)-1], importLine)
+ } else {
+ // Create a string literal node
+ templateExpr = NewLiteralNode(templatePath, importLine)
+ }
+
+ // Skip to end of token
+ parser.tokenIndex++
+
+ // Expect block end
+ if parser.tokenIndex >= len(parser.tokens) ||
+ (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
+ parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
+ return nil, fmt.Errorf("expected block end token after import statement at line %d", importLine)
+ }
+ parser.tokenIndex++
+
+ // Create import node
+ return NewImportNode(templateExpr, alias, importLine), nil
+ }
+ }
+
+ // Standard parsing path
+ // Get the template to import
+ templateExpr, err := parser.parseExpression()
+ if err != nil {
+ return nil, err
+ }
+
+ // Expect 'as' keyword
+ if parser.tokenIndex >= len(parser.tokens) ||
+ parser.tokens[parser.tokenIndex].Type != TOKEN_NAME ||
+ parser.tokens[parser.tokenIndex].Value != "as" {
+ return nil, fmt.Errorf("expected 'as' after template path at line %d", importLine)
+ }
+ parser.tokenIndex++
+
+ // Get the alias name
+ if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
+ return nil, fmt.Errorf("expected identifier after 'as' at line %d", importLine)
+ }
+
+ alias := parser.tokens[parser.tokenIndex].Value
+ parser.tokenIndex++
+
+ // Expect block end
+ if parser.tokenIndex >= len(parser.tokens) ||
+ (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
+ parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
+ return nil, fmt.Errorf("expected block end token after import statement at line %d", importLine)
+ }
+ parser.tokenIndex++
+
+ // Create import node
+ return NewImportNode(templateExpr, alias, importLine), nil
+}
diff --git a/parse_macro.go b/parse_macro.go
new file mode 100644
index 0000000..c4865ae
--- /dev/null
+++ b/parse_macro.go
@@ -0,0 +1,245 @@
+package twig
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// parseMacro parses a {% macro name(params...) %} ... {% endmacro %} tag and
+// returns a MacroNode. On entry parser.tokenIndex points at the token that
+// follows the "macro" keyword (the tag-open token is two positions back).
+//
+// Two paths are handled:
+//  1. a workaround path for macro declarations where the tokenizer fused the
+//     name and parameter list into a single NAME token ("name(a, b=1"),
+//     recovered here by string splitting;
+//  2. the regular path where name, "(", parameters and ")" arrive as
+//     separate tokens and defaults are parsed as real expressions.
+func (p *Parser) parseMacro(parser *Parser) (Node, error) {
+    // Dump the upcoming tokens when verbose debugging is enabled.
+    if IsDebugEnabled() && debugger.level >= DebugVerbose {
+        tokenIndex := parser.tokenIndex - 2
+        LogVerbose("Parsing macro, tokens available:")
+        for i := 0; i < 10 && tokenIndex+i < len(parser.tokens); i++ {
+            token := parser.tokens[tokenIndex+i]
+            LogVerbose(" Token %d: Type=%d, Value=%q, Line=%d", i, token.Type, token.Value, token.Line)
+        }
+    }
+
+    // Line of the macro tag, used for error reporting and node creation.
+    macroLine := parser.tokens[parser.tokenIndex-2].Line
+
+    // Get the macro name token.
+    if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
+        return nil, fmt.Errorf("expected macro name after macro keyword at line %d", macroLine)
+    }
+
+    // Special handling for incorrectly tokenized macro declarations.
+    macroNameRaw := parser.tokens[parser.tokenIndex].Value
+    if IsDebugEnabled() && debugger.level >= DebugVerbose {
+        LogVerbose("Raw macro name: %s", macroNameRaw)
+    }
+
+    // If the name token contains "(", the tokenizer fused the name and the
+    // parameter list; recover both by splitting the raw string.
+    if strings.Contains(macroNameRaw, "(") {
+        parts := strings.SplitN(macroNameRaw, "(", 2)
+        if len(parts) == 2 {
+            macroName := parts[0]
+            paramStr := "(" + parts[1]
+            if IsDebugEnabled() && debugger.level >= DebugVerbose {
+                LogVerbose("Fixed macro name: %s", macroName)
+                LogVerbose("Parameter string: %s", paramStr)
+            }
+
+            // Parse parameters recovered from the fused token.
+            var params []string
+            defaults := make(map[string]Node)
+
+            // Simple parameter parsing - split by comma.
+            // NOTE(review): this string-level split breaks for default values
+            // containing "," or "=" inside quotes — confirm the tokenizer
+            // never produces such fused tokens.
+            paramList := strings.TrimRight(paramStr[1:], ")")
+            if paramList != "" {
+                paramItems := strings.Split(paramList, ",")
+
+                for _, param := range paramItems {
+                    param = strings.TrimSpace(param)
+
+                    // Check for a default value ("name = value").
+                    if strings.Contains(param, "=") {
+                        parts := strings.SplitN(param, "=", 2)
+                        paramName := strings.TrimSpace(parts[0])
+                        defaultValue := strings.TrimSpace(parts[1])
+
+                        params = append(params, paramName)
+
+                        // Interpret the default literal: quoted string, bool,
+                        // integer, falling back to the raw string.
+                        if (strings.HasPrefix(defaultValue, "'") && strings.HasSuffix(defaultValue, "'")) ||
+                            (strings.HasPrefix(defaultValue, "\"") && strings.HasSuffix(defaultValue, "\"")) {
+                            // Remove quotes
+                            strValue := defaultValue[1 : len(defaultValue)-1]
+                            defaults[paramName] = NewLiteralNode(strValue, macroLine)
+                        } else if defaultValue == "true" {
+                            defaults[paramName] = NewLiteralNode(true, macroLine)
+                        } else if defaultValue == "false" {
+                            defaults[paramName] = NewLiteralNode(false, macroLine)
+                        } else if i, err := strconv.Atoi(defaultValue); err == nil {
+                            defaults[paramName] = NewLiteralNode(i, macroLine)
+                        } else {
+                            // Fallback to string
+                            defaults[paramName] = NewLiteralNode(defaultValue, macroLine)
+                        }
+                    } else {
+                        params = append(params, param)
+                    }
+                }
+            }
+
+            // Skip past the fused name token.
+            parser.tokenIndex++
+
+            // Expect block end (regular or whitespace-trimming variant).
+            if parser.tokenIndex >= len(parser.tokens) ||
+                (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
+                    parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
+                return nil, fmt.Errorf("expected block end token after macro declaration at line %d", macroLine)
+            }
+            parser.tokenIndex++
+
+            // Parse the macro body up to the next unmatched tag.
+            bodyNodes, err := parser.parseOuterTemplate()
+            if err != nil {
+                return nil, err
+            }
+
+            // Expect the {% endmacro %} tag.
+            if parser.tokenIndex+1 >= len(parser.tokens) ||
+                (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START &&
+                    parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START_TRIM) ||
+                parser.tokens[parser.tokenIndex+1].Type != TOKEN_NAME ||
+                parser.tokens[parser.tokenIndex+1].Value != "endmacro" {
+                return nil, fmt.Errorf("missing endmacro tag for macro '%s' at line %d",
+                    macroName, macroLine)
+            }
+
+            // Skip {% endmacro
+            parser.tokenIndex += 2
+
+            // Expect block end. Report macroLine here: tokenIndex may be past
+            // the end of the token slice, so indexing it for the line number
+            // (as before) could panic in this error path.
+            if parser.tokenIndex >= len(parser.tokens) ||
+                (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
+                    parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
+                return nil, fmt.Errorf("expected block end token after endmacro at line %d", macroLine)
+            }
+            parser.tokenIndex++
+
+            // Create the macro node.
+            if IsDebugEnabled() && debugger.level >= DebugVerbose {
+                LogVerbose("Creating MacroNode with %d parameters and %d defaults", len(params), len(defaults))
+            }
+            return NewMacroNode(macroName, params, defaults, bodyNodes, macroLine), nil
+        }
+    }
+
+    // Regular parsing path: name, "(", parameters and ")" are separate tokens.
+    macroName := parser.tokens[parser.tokenIndex].Value
+    if IsDebugEnabled() && debugger.level >= DebugVerbose {
+        LogVerbose("Macro name: %s", macroName)
+    }
+    parser.tokenIndex++
+
+    // Expect opening parenthesis for parameters.
+    if parser.tokenIndex >= len(parser.tokens) ||
+        parser.tokens[parser.tokenIndex].Type != TOKEN_PUNCTUATION ||
+        parser.tokens[parser.tokenIndex].Value != "(" {
+        return nil, fmt.Errorf("expected '(' after macro name at line %d", macroLine)
+    }
+    parser.tokenIndex++
+
+    // Parse parameters.
+    var params []string
+    defaults := make(map[string]Node)
+
+    // A token other than ")" here means there is at least one parameter.
+    if parser.tokenIndex < len(parser.tokens) &&
+        (parser.tokens[parser.tokenIndex].Type != TOKEN_PUNCTUATION ||
+            parser.tokens[parser.tokenIndex].Value != ")") {
+
+        for {
+            // Get the parameter name.
+            if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
+                return nil, fmt.Errorf("expected parameter name at line %d", macroLine)
+            }
+
+            paramName := parser.tokens[parser.tokenIndex].Value
+            // Route diagnostics through the gated debug logger rather than a
+            // leftover fmt.Println that printed on every parse.
+            if IsDebugEnabled() && debugger.level >= DebugVerbose {
+                LogVerbose("Parameter name: %s", paramName)
+            }
+            params = append(params, paramName)
+            parser.tokenIndex++
+
+            // Optional default value: "= expression".
+            if parser.tokenIndex < len(parser.tokens) &&
+                parser.tokens[parser.tokenIndex].Type == TOKEN_OPERATOR &&
+                parser.tokens[parser.tokenIndex].Value == "=" {
+                parser.tokenIndex++ // Skip =
+
+                // Parse the default value as a full expression.
+                defaultExpr, err := parser.parseExpression()
+                if err != nil {
+                    return nil, err
+                }
+
+                defaults[paramName] = defaultExpr
+            }
+
+            // A comma means more parameters follow.
+            if parser.tokenIndex < len(parser.tokens) &&
+                parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
+                parser.tokens[parser.tokenIndex].Value == "," {
+                parser.tokenIndex++ // Skip comma
+                continue
+            }
+
+            break
+        }
+    }
+
+    // Expect closing parenthesis.
+    if parser.tokenIndex >= len(parser.tokens) ||
+        parser.tokens[parser.tokenIndex].Type != TOKEN_PUNCTUATION ||
+        parser.tokens[parser.tokenIndex].Value != ")" {
+        return nil, fmt.Errorf("expected ')' after macro parameters at line %d", macroLine)
+    }
+    parser.tokenIndex++
+
+    // Expect block end (regular or whitespace-trimming variant).
+    if parser.tokenIndex >= len(parser.tokens) ||
+        (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
+            parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
+        return nil, fmt.Errorf("expected block end token after macro declaration at line %d", macroLine)
+    }
+    parser.tokenIndex++
+
+    // Parse the macro body up to the next unmatched tag.
+    bodyNodes, err := parser.parseOuterTemplate()
+    if err != nil {
+        return nil, err
+    }
+
+    // Expect the {% endmacro %} tag.
+    if parser.tokenIndex+1 >= len(parser.tokens) ||
+        (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START &&
+            parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START_TRIM) ||
+        parser.tokens[parser.tokenIndex+1].Type != TOKEN_NAME ||
+        parser.tokens[parser.tokenIndex+1].Value != "endmacro" {
+        return nil, fmt.Errorf("missing endmacro tag for macro '%s' at line %d",
+            macroName, macroLine)
+    }
+
+    // Skip {% endmacro
+    parser.tokenIndex += 2
+
+    // Expect block end. Report macroLine: indexing parser.tokens here could
+    // panic when tokenIndex is out of range (see the guard just above).
+    if parser.tokenIndex >= len(parser.tokens) ||
+        (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
+            parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
+        return nil, fmt.Errorf("expected block end token after endmacro at line %d", macroLine)
+    }
+    parser.tokenIndex++
+
+    // Create the macro node.
+    if IsDebugEnabled() && debugger.level >= DebugVerbose {
+        LogVerbose("Creating MacroNode with %d parameters and %d defaults", len(params), len(defaults))
+    }
+    return NewMacroNode(macroName, params, defaults, bodyNodes, macroLine), nil
+}
diff --git a/parse_set.go b/parse_set.go
new file mode 100644
index 0000000..efa752e
--- /dev/null
+++ b/parse_set.go
@@ -0,0 +1,61 @@
+package twig
+
+import "fmt"
+
+// parseSet parses a {% set name = expression %} tag and returns a SetNode.
+// On entry parser.tokenIndex points at the token that follows the "set"
+// keyword (the tag-open token is two positions back).
+func (p *Parser) parseSet(parser *Parser) (Node, error) {
+    // Line of the set tag, used for error reporting and node creation.
+    setLine := parser.tokens[parser.tokenIndex-2].Line
+
+    // Get the variable name being assigned.
+    if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
+        return nil, fmt.Errorf("expected variable name after set at line %d", setLine)
+    }
+
+    varName := parser.tokens[parser.tokenIndex].Value
+    parser.tokenIndex++
+
+    // Expect the '=' operator.
+    if parser.tokenIndex >= len(parser.tokens) ||
+        parser.tokens[parser.tokenIndex].Type != TOKEN_OPERATOR ||
+        parser.tokens[parser.tokenIndex].Value != "=" {
+        return nil, fmt.Errorf("expected '=' after variable name at line %d", setLine)
+    }
+    parser.tokenIndex++
+
+    // Parse the assigned value expression.
+    valueExpr, err := parser.parseExpression()
+    if err != nil {
+        return nil, err
+    }
+
+    // Workaround for expressions like "5 + 10": if an operator token remains
+    // after the first expression (i.e. parseExpression stopped at the first
+    // operand), parse the right-hand side and fold both into a binary node.
+    if parser.tokenIndex < len(parser.tokens) &&
+        parser.tokens[parser.tokenIndex].Type == TOKEN_OPERATOR &&
+        parser.tokens[parser.tokenIndex].Value != "=" {
+
+        // Get the operator.
+        operator := parser.tokens[parser.tokenIndex].Value
+        parser.tokenIndex++
+
+        // Parse the right side.
+        rightExpr, err := parser.parseExpression()
+        if err != nil {
+            return nil, err
+        }
+
+        // Create a binary node combining both sides.
+        valueExpr = NewBinaryNode(operator, valueExpr, rightExpr, setLine)
+    }
+
+    // Expect the block end token. Accept the whitespace-trimming variant
+    // (-%}) as well, matching every other tag parser; previously a tag such
+    // as "{% set x = 1 -%}" was wrongly rejected.
+    if parser.tokenIndex >= len(parser.tokens) ||
+        (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
+            parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
+        return nil, fmt.Errorf("expected block end token after set expression at line %d", setLine)
+    }
+    parser.tokenIndex++
+
+    // Create and return the set node.
+    return NewSetNode(varName, valueExpr, setLine), nil
+}
diff --git a/parse_verbatim.go b/parse_verbatim.go
new file mode 100644
index 0000000..d670fd5
--- /dev/null
+++ b/parse_verbatim.go
@@ -0,0 +1,132 @@
+package twig
+
+import (
+ "fmt"
+ "strings"
+)
+
+// parseVerbatim parses a verbatim tag and its content.
+//
+// Grammar: {% verbatim %} ... {% endverbatim %}. Everything between the two
+// tags is collected as literal text into a VerbatimNode, so nested {{ }},
+// {% %} and {# #} constructs are NOT evaluated. Because the source was
+// already tokenized, inner tags are reconstructed from their tokens rather
+// than copied from the raw source.
+// NOTE(review): the reconstruction concatenates inner token values without
+// separators, so inter-token whitespace is lost (e.g. "{{ name }}" is
+// emitted as "{{name}}") — confirm whether byte-exact output is required.
+func (p *Parser) parseVerbatim(parser *Parser) (Node, error) {
+ // Get the line number of the verbatim token (the tag-open token sits two
+ // positions behind the current index when this handler is invoked)
+ verbatimLine := parser.tokens[parser.tokenIndex-2].Line
+
+ // Expect the block end token
+ if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
+ return nil, fmt.Errorf("expected block end after verbatim tag at line %d", verbatimLine)
+ }
+ parser.tokenIndex++
+
+ // Collect all content until we find the endverbatim tag
+ var contentBuilder strings.Builder
+
+ for parser.tokenIndex < len(parser.tokens) {
+ token := parser.tokens[parser.tokenIndex]
+
+ // Look for the endverbatim tag. A block-start token that is NOT
+ // followed by "endverbatim" falls through to the reconstruction
+ // branches below.
+ if token.Type == TOKEN_BLOCK_START || token.Type == TOKEN_BLOCK_START_TRIM {
+ // Check if this is the endverbatim tag
+ if parser.tokenIndex+1 < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex+1].Type == TOKEN_NAME &&
+ parser.tokens[parser.tokenIndex+1].Value == "endverbatim" {
+
+ // Skip the block start and endverbatim name tokens; the index now
+ // points at the expected block end token
+ parser.tokenIndex += 2
+
+ // Expect the block end token
+ if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
+ return nil, fmt.Errorf("expected block end after endverbatim at line %d", token.Line)
+ }
+ parser.tokenIndex++ // Skip the block end token
+
+ // Create a verbatim node with the collected content
+ return NewVerbatimNode(contentBuilder.String(), verbatimLine), nil
+ }
+ }
+
+ // Add this token's content to our verbatim content
+ if token.Type == TOKEN_TEXT {
+ contentBuilder.WriteString(token.Value)
+ } else if token.Type == TOKEN_VAR_START || token.Type == TOKEN_VAR_START_TRIM {
+ // For variable tags, preserve them as literal text
+ contentBuilder.WriteString("{{")
+
+ // Skip variable start token and process until variable end
+ parser.tokenIndex++
+
+ // Process tokens until variable end. Only NAME/STRING/NUMBER/
+ // OPERATOR/PUNCTUATION token values are kept; anything else inside
+ // the tag is dropped.
+ for parser.tokenIndex < len(parser.tokens) {
+ innerToken := parser.tokens[parser.tokenIndex]
+
+ if innerToken.Type == TOKEN_VAR_END || innerToken.Type == TOKEN_VAR_END_TRIM {
+ contentBuilder.WriteString("}}")
+ break
+ } else if innerToken.Type == TOKEN_NAME || innerToken.Type == TOKEN_STRING ||
+ innerToken.Type == TOKEN_NUMBER || innerToken.Type == TOKEN_OPERATOR ||
+ innerToken.Type == TOKEN_PUNCTUATION {
+ contentBuilder.WriteString(innerToken.Value)
+ }
+
+ parser.tokenIndex++
+ }
+ } else if token.Type == TOKEN_BLOCK_START || token.Type == TOKEN_BLOCK_START_TRIM {
+ // For block tags other than endverbatim (handled above), preserve
+ // them as literal text
+ contentBuilder.WriteString("{%")
+
+ // Skip block start token and process until block end
+ parser.tokenIndex++
+
+ // Process tokens until block end
+ for parser.tokenIndex < len(parser.tokens) {
+ innerToken := parser.tokens[parser.tokenIndex]
+
+ if innerToken.Type == TOKEN_BLOCK_END || innerToken.Type == TOKEN_BLOCK_END_TRIM {
+ contentBuilder.WriteString("%}")
+ break
+ } else if innerToken.Type == TOKEN_NAME || innerToken.Type == TOKEN_STRING ||
+ innerToken.Type == TOKEN_NUMBER || innerToken.Type == TOKEN_OPERATOR ||
+ innerToken.Type == TOKEN_PUNCTUATION {
+ // If this is the first TOKEN_NAME in a block (the tag name), add a
+ // space after it so "{% if x %}" is not emitted as "{% ifx %}"
+ if innerToken.Type == TOKEN_NAME && parser.tokenIndex > 0 &&
+ (parser.tokens[parser.tokenIndex-1].Type == TOKEN_BLOCK_START ||
+ parser.tokens[parser.tokenIndex-1].Type == TOKEN_BLOCK_START_TRIM) {
+ contentBuilder.WriteString(innerToken.Value + " ")
+ } else {
+ contentBuilder.WriteString(innerToken.Value)
+ }
+ }
+
+ parser.tokenIndex++
+ }
+ } else if token.Type == TOKEN_COMMENT_START {
+ // For comment tags, preserve them as literal text; only TOKEN_TEXT
+ // content between {# and #} is kept
+ contentBuilder.WriteString("{#")
+
+ // Skip comment start token and process until comment end
+ parser.tokenIndex++
+
+ // Process tokens until comment end
+ for parser.tokenIndex < len(parser.tokens) {
+ innerToken := parser.tokens[parser.tokenIndex]
+
+ if innerToken.Type == TOKEN_COMMENT_END {
+ contentBuilder.WriteString("#}")
+ break
+ } else if innerToken.Type == TOKEN_TEXT {
+ contentBuilder.WriteString(innerToken.Value)
+ }
+
+ parser.tokenIndex++
+ }
+ }
+
+ // Advance past the token just handled (or past the closing token of an
+ // inner construct, where the inner loops break while pointing at it)
+ parser.tokenIndex++
+
+ // Check for end of tokens
+ if parser.tokenIndex >= len(parser.tokens) {
+ return nil, fmt.Errorf("unexpected end of template, unclosed verbatim tag at line %d", verbatimLine)
+ }
+ }
+
+ // If we get here, we never found the endverbatim tag
+ return nil, fmt.Errorf("unclosed verbatim tag at line %d", verbatimLine)
+}
diff --git a/parser.go b/parser.go
index 7f9daa0..35ae65c 100644
--- a/parser.go
+++ b/parser.go
@@ -115,257 +115,10 @@ func (p *Parser) initBlockHandlers() {
}
}
-// Tokenize the source into a list of tokens
-func (p *Parser) tokenize() ([]Token, error) {
- var tokens []Token
-
- for p.cursor < len(p.source) {
- // Check for variable syntax with whitespace control {{ }} or {{- -}}
- if p.matchString("{{-") {
- tokens = append(tokens, Token{Type: TOKEN_VAR_START_TRIM, Line: p.line})
- p.cursor += 3
- // Skip whitespace after opening braces
- for p.cursor < len(p.source) && isWhitespace(p.current()) {
- if p.current() == '\n' {
- p.line++
- }
- p.cursor++
- }
- continue
- } else if p.matchString("{{") {
- tokens = append(tokens, Token{Type: TOKEN_VAR_START, Line: p.line})
- p.cursor += 2
- // Skip whitespace after opening braces
- for p.cursor < len(p.source) && isWhitespace(p.current()) {
- if p.current() == '\n' {
- p.line++
- }
- p.cursor++
- }
- continue
- }
-
- if p.matchString("-}}") {
- tokens = append(tokens, Token{Type: TOKEN_VAR_END_TRIM, Line: p.line})
- p.cursor += 3
- continue
- } else if p.matchString("}}") {
- tokens = append(tokens, Token{Type: TOKEN_VAR_END, Line: p.line})
- p.cursor += 2
- continue
- }
-
- // Check for block syntax with whitespace control {% %} or {%- -%}
- if p.matchString("{%-") {
- tokens = append(tokens, Token{Type: TOKEN_BLOCK_START_TRIM, Line: p.line})
- p.cursor += 3
- // Skip whitespace after opening braces
- for p.cursor < len(p.source) && isWhitespace(p.current()) {
- if p.current() == '\n' {
- p.line++
- }
- p.cursor++
- }
- continue
- } else if p.matchString("{%") {
- tokens = append(tokens, Token{Type: TOKEN_BLOCK_START, Line: p.line})
- p.cursor += 2
- // Skip whitespace after opening braces
- for p.cursor < len(p.source) && isWhitespace(p.current()) {
- if p.current() == '\n' {
- p.line++
- }
- p.cursor++
- }
- continue
- }
-
- if p.matchString("-%}") {
- tokens = append(tokens, Token{Type: TOKEN_BLOCK_END_TRIM, Line: p.line})
- p.cursor += 3
- continue
- } else if p.matchString("%}") {
- tokens = append(tokens, Token{Type: TOKEN_BLOCK_END, Line: p.line})
- p.cursor += 2
- continue
- }
-
- // Check for comment syntax {# #}
- if p.matchString("{#") {
- tokens = append(tokens, Token{Type: TOKEN_COMMENT_START, Line: p.line})
- p.cursor += 2
- // Skip whitespace after opening braces
- for p.cursor < len(p.source) && isWhitespace(p.current()) {
- if p.current() == '\n' {
- p.line++
- }
- p.cursor++
- }
- continue
- }
-
- if p.matchString("#}") {
- tokens = append(tokens, Token{Type: TOKEN_COMMENT_END, Line: p.line})
- p.cursor += 2
- continue
- }
-
- // Check for string literals
- if p.current() == '"' || p.current() == '\'' {
- quote := p.current()
- startLine := p.line
- p.cursor++
-
- var sb strings.Builder
-
- for p.cursor < len(p.source) && p.current() != quote {
- // Handle escape sequences properly
- if p.current() == '\\' && p.cursor+1 < len(p.source) {
- p.cursor++ // Skip the backslash
- // Just collect the escaped character
- sb.WriteByte(p.current())
- p.cursor++
- continue
- }
-
- // Just add the character
- sb.WriteByte(p.current())
-
- if p.current() == '\n' {
- p.line++
- }
- p.cursor++
- }
-
- if p.cursor >= len(p.source) {
- return nil, fmt.Errorf("unterminated string at line %d", startLine)
- }
-
- tokens = append(tokens, Token{Type: TOKEN_STRING, Value: sb.String(), Line: startLine})
- p.cursor++ // Skip closing quote
- continue
- }
-
- // Check for numbers
- if isDigit(p.current()) {
- start := p.cursor
- for p.cursor < len(p.source) && (isDigit(p.current()) || p.current() == '.') {
- p.cursor++
- }
-
- value := p.source[start:p.cursor]
- tokens = append(tokens, Token{Type: TOKEN_NUMBER, Value: value, Line: p.line})
- continue
- }
-
- // Check for identifiers/names
- if isAlpha(p.current()) {
- start := p.cursor
- for p.cursor < len(p.source) && isAlphaNumeric(p.current()) {
- p.cursor++
- }
-
- value := p.source[start:p.cursor]
- tokens = append(tokens, Token{Type: TOKEN_NAME, Value: value, Line: p.line})
- continue
- }
-
- // Check for operators
- if isOperator(p.current()) {
- start := p.cursor
- for p.cursor < len(p.source) && isOperator(p.current()) {
- p.cursor++
- }
-
- value := p.source[start:p.cursor]
- tokens = append(tokens, Token{Type: TOKEN_OPERATOR, Value: value, Line: p.line})
- continue
- }
-
- // Check for punctuation
- if isPunctuation(p.current()) {
- tokens = append(tokens, Token{
- Type: TOKEN_PUNCTUATION,
- Value: string(p.current()),
- Line: p.line,
- })
- p.cursor++
- continue
- }
-
- // Check for whitespace and newlines
- if isWhitespace(p.current()) {
- if p.current() == '\n' {
- p.line++
- }
- p.cursor++
- continue
- }
-
- // Handle plain text - this is the entire HTML content
- // We should collect all text up to the next twig tag start
- start := p.cursor
- for p.cursor < len(p.source) &&
- !p.matchString("{{-") && !p.matchString("{{") &&
- !p.matchString("-}}") && !p.matchString("}}") &&
- !p.matchString("{%-") && !p.matchString("{%") &&
- !p.matchString("-%}") && !p.matchString("%}") &&
- !p.matchString("{#") && !p.matchString("#}") {
- if p.current() == '\n' {
- p.line++
- }
- p.cursor++
- }
-
- if start != p.cursor {
- // Get the text segment as a single token, preserving ALL characters
- // This is critical for HTML parsing - we do not want to tokenize HTML!
- value := p.source[start:p.cursor]
- tokens = append(tokens, Token{Type: TOKEN_TEXT, Value: value, Line: p.line})
- }
- }
-
- tokens = append(tokens, Token{Type: TOKEN_EOF, Line: p.line})
- return tokens, nil
-}
-
-// Helper methods for tokenization
-func (p *Parser) current() byte {
- if p.cursor >= len(p.source) {
- return 0
- }
- return p.source[p.cursor]
-}
-
-// Helper function to check if a token is any kind of block end token (regular or trim variant)
-func isBlockEndToken(tokenType int) bool {
- return tokenType == TOKEN_BLOCK_END || tokenType == TOKEN_BLOCK_END_TRIM
-}
-
-// Helper function to check if a token is any kind of variable end token (regular or trim variant)
-func isVarEndToken(tokenType int) bool {
- return tokenType == TOKEN_VAR_END || tokenType == TOKEN_VAR_END_TRIM
-}
-
-func (p *Parser) matchString(s string) bool {
- if p.cursor+len(s) > len(p.source) {
- return false
- }
- return p.source[p.cursor:p.cursor+len(s)] == s
-}
-
func isDigit(c byte) bool {
return c >= '0' && c <= '9'
}
-func isAlpha(c byte) bool {
- return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_'
-}
-
-func isAlphaNumeric(c byte) bool {
- return isAlpha(c) || isDigit(c)
-}
-
func isOperator(c byte) bool {
return strings.ContainsRune("+-*/=<>!&~^%", rune(c))
}
@@ -1396,1054 +1149,6 @@ func (p *Parser) parseBinaryExpression(left Node) (Node, error) {
return binaryNode, nil
}
-// Parse if statement
-func (p *Parser) parseIf(parser *Parser) (Node, error) {
- // Get the line number of the if token
- ifLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Parse the condition expression
- condition, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // Expect the block end token (either regular or whitespace-trimming variant)
- if parser.tokenIndex >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
- return nil, fmt.Errorf("expected block end after if condition at line %d", ifLine)
- }
- parser.tokenIndex++
-
- // Parse the if body (statements between if and endif/else/elseif)
- ifBody, err := parser.parseOuterTemplate()
- if err != nil {
- return nil, err
- }
-
- // Initialize conditions and bodies arrays with the initial if condition and body
- conditions := []Node{condition}
- bodies := [][]Node{ifBody}
- var elseBody []Node
-
- // Keep track of whether we've seen an else block
- var hasElseBlock bool
-
- // Process subsequent tags (elseif, else, endif)
- for {
- // We expect a block start token for elseif, else, or endif
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START {
- return nil, fmt.Errorf("unexpected end of template, expected endif at line %d", ifLine)
- }
- parser.tokenIndex++
-
- // We expect a name token (elseif, else, or endif)
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected block name at line %d", parser.tokens[parser.tokenIndex-1].Line)
- }
-
- // Get the tag name
- blockName := parser.tokens[parser.tokenIndex].Value
- blockLine := parser.tokens[parser.tokenIndex].Line
- parser.tokenIndex++
-
- // Process based on the tag type
- if blockName == "elseif" {
- // Check if we've already seen an else block - elseif can't come after else
- if hasElseBlock {
- return nil, fmt.Errorf("unexpected elseif after else at line %d", blockLine)
- }
-
- // Handle elseif condition
- elseifCondition, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // Expect block end token
- if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
- return nil, fmt.Errorf("expected block end after elseif condition at line %d", blockLine)
- }
- parser.tokenIndex++
-
- // Parse the elseif body
- elseifBody, err := parser.parseOuterTemplate()
- if err != nil {
- return nil, err
- }
-
- // Add condition and body to our arrays
- conditions = append(conditions, elseifCondition)
- bodies = append(bodies, elseifBody)
-
- // Continue checking for more elseif/else/endif tags
- } else if blockName == "else" {
- // Check if we've already seen an else block - can't have multiple else blocks
- if hasElseBlock {
- return nil, fmt.Errorf("multiple else blocks found at line %d", blockLine)
- }
-
- // Mark that we've seen an else block
- hasElseBlock = true
-
- // Expect block end token
- if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
- return nil, fmt.Errorf("expected block end after else tag at line %d", blockLine)
- }
- parser.tokenIndex++
-
- // Parse the else body
- elseBody, err = parser.parseOuterTemplate()
- if err != nil {
- return nil, err
- }
-
- // After else, we need to find endif next (handled in next iteration)
- } else if blockName == "endif" {
- // Expect block end token
- if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
- return nil, fmt.Errorf("expected block end after endif at line %d", blockLine)
- }
- parser.tokenIndex++
-
- // We found the endif, we're done
- break
- } else {
- return nil, fmt.Errorf("expected elseif, else, or endif, got %s at line %d", blockName, blockLine)
- }
- }
-
- // Create and return the if node
- ifNode := &IfNode{
- conditions: conditions,
- bodies: bodies,
- elseBranch: elseBody,
- line: ifLine,
- }
-
- return ifNode, nil
-}
-
-func (p *Parser) parseFor(parser *Parser) (Node, error) {
- // Get the line number of the for token
- forLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Parse the loop variable name(s)
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected variable name after for at line %d", forLine)
- }
-
- // Get value variable name
- valueVar := parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
-
- var keyVar string
-
- // Check for key, value syntax
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
- parser.tokens[parser.tokenIndex].Value == "," {
-
- // Move past the comma
- parser.tokenIndex++
-
- // Now valueVar is actually the key, and we need to get the value
- keyVar = valueVar
-
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected value variable name after comma at line %d", forLine)
- }
-
- valueVar = parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
- }
-
- // Expect 'in' keyword
- if parser.tokenIndex >= len(parser.tokens) ||
- parser.tokens[parser.tokenIndex].Type != TOKEN_NAME ||
- parser.tokens[parser.tokenIndex].Value != "in" {
- return nil, fmt.Errorf("expected 'in' keyword after variable name at line %d", forLine)
- }
- parser.tokenIndex++
-
- // Parse the sequence expression
- sequence, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // Check for filter operator (|) - needed for cases where filter detection might be missed
- if IsDebugEnabled() {
- LogDebug("For loop sequence expression type: %T", sequence)
- }
-
- // Expect the block end token (either regular or trim variant)
- if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
- return nil, fmt.Errorf("expected block end after for statement at line %d", forLine)
- }
- parser.tokenIndex++
-
- // Parse the for loop body
- loopBody, err := parser.parseOuterTemplate()
- if err != nil {
- return nil, err
- }
-
- var elseBody []Node
-
- // Check for else or endfor
- if parser.tokenIndex < len(parser.tokens) && parser.tokens[parser.tokenIndex].Type == TOKEN_BLOCK_START {
- parser.tokenIndex++
-
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected block name at line %d", parser.tokens[parser.tokenIndex-1].Line)
- }
-
- // Check if this is an else block
- if parser.tokens[parser.tokenIndex].Value == "else" {
- parser.tokenIndex++
-
- // Expect the block end token
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
- return nil, fmt.Errorf("expected block end after else at line %d", parser.tokens[parser.tokenIndex-1].Line)
- }
- parser.tokenIndex++
-
- // Parse the else body
- elseBody, err = parser.parseOuterTemplate()
- if err != nil {
- return nil, err
- }
-
- // Now expect the endfor
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START {
- return nil, fmt.Errorf("expected endfor block at line %d", parser.tokens[parser.tokenIndex-1].Line)
- }
- parser.tokenIndex++
-
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected endfor at line %d", parser.tokens[parser.tokenIndex-1].Line)
- }
-
- if parser.tokens[parser.tokenIndex].Value != "endfor" {
- return nil, fmt.Errorf("expected endfor, got %s at line %d", parser.tokens[parser.tokenIndex].Value, parser.tokens[parser.tokenIndex].Line)
- }
- parser.tokenIndex++
- } else if parser.tokens[parser.tokenIndex].Value == "endfor" {
- parser.tokenIndex++
- } else {
- return nil, fmt.Errorf("expected else or endfor, got %s at line %d", parser.tokens[parser.tokenIndex].Value, parser.tokens[parser.tokenIndex].Line)
- }
-
- // Expect the final block end token
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
- return nil, fmt.Errorf("expected block end after endfor at line %d", parser.tokens[parser.tokenIndex-1].Line)
- }
- parser.tokenIndex++
- } else {
- return nil, fmt.Errorf("unexpected end of template, expected endfor at line %d", forLine)
- }
-
- // Create the for node
- forNode := &ForNode{
- keyVar: keyVar,
- valueVar: valueVar,
- sequence: sequence,
- body: loopBody,
- elseBranch: elseBody,
- line: forLine,
- }
-
- return forNode, nil
-}
-
-func (p *Parser) parseBlock(parser *Parser) (Node, error) {
- // Get the line number of the block token
- blockLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Get the block name
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected block name at line %d", blockLine)
- }
-
- blockName := parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
-
- // Expect the block end token
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
- return nil, fmt.Errorf("expected block end token after block name at line %d", blockLine)
- }
- parser.tokenIndex++
-
- // Parse the block body
- blockBody, err := parser.parseOuterTemplate()
- if err != nil {
- return nil, err
- }
-
- // Expect endblock tag
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START {
- return nil, fmt.Errorf("expected endblock tag at line %d", blockLine)
- }
- parser.tokenIndex++
-
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME ||
- parser.tokens[parser.tokenIndex].Value != "endblock" {
- return nil, fmt.Errorf("expected endblock at line %d", parser.tokens[parser.tokenIndex-1].Line)
- }
- parser.tokenIndex++
-
- // Check for optional block name in endblock
- if parser.tokenIndex < len(parser.tokens) && parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
- endBlockName := parser.tokens[parser.tokenIndex].Value
- if endBlockName != blockName {
- return nil, fmt.Errorf("mismatched block name, expected %s but got %s at line %d",
- blockName, endBlockName, parser.tokens[parser.tokenIndex].Line)
- }
- parser.tokenIndex++
- }
-
- // Expect the final block end token
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
- return nil, fmt.Errorf("expected block end token after endblock at line %d", parser.tokens[parser.tokenIndex-1].Line)
- }
- parser.tokenIndex++
-
- // Create the block node
- blockNode := &BlockNode{
- name: blockName,
- body: blockBody,
- line: blockLine,
- }
-
- return blockNode, nil
-}
-
-func (p *Parser) parseExtends(parser *Parser) (Node, error) {
- // Get the line number of the extends token
- extendsLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Get the parent template expression
- parentExpr, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // Expect the block end token
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
- return nil, fmt.Errorf("expected block end token after extends at line %d", extendsLine)
- }
- parser.tokenIndex++
-
- // Create the extends node
- extendsNode := &ExtendsNode{
- parent: parentExpr,
- line: extendsLine,
- }
-
- return extendsNode, nil
-}
-
-func (p *Parser) parseInclude(parser *Parser) (Node, error) {
- // Get the line number of the include token
- includeLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Get the template expression
- templateExpr, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // Check for optional parameters
- var variables map[string]Node
- var ignoreMissing bool
- var onlyContext bool
-
- // Look for 'with', 'ignore missing', or 'only'
- for parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
-
- keyword := parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
-
- switch keyword {
- case "with":
- // Parse variables as a hash
- if variables == nil {
- variables = make(map[string]Node)
- }
-
- // Check for opening brace
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
- parser.tokens[parser.tokenIndex].Value == "{" {
- parser.tokenIndex++ // Skip opening brace
-
- // Parse key-value pairs
- for {
- // If we see a closing brace, we're done
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
- parser.tokens[parser.tokenIndex].Value == "}" {
- parser.tokenIndex++ // Skip closing brace
- break
- }
-
- // Get the variable name - can be string literal or name token
- var varName string
- if parser.tokenIndex < len(parser.tokens) && parser.tokens[parser.tokenIndex].Type == TOKEN_STRING {
- // It's a quoted string key
- varName = parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
- } else if parser.tokenIndex < len(parser.tokens) && parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
- // It's an unquoted key
- varName = parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
- } else {
- return nil, fmt.Errorf("expected variable name or string at line %d", includeLine)
- }
-
- // Expect colon or equals
- if parser.tokenIndex >= len(parser.tokens) ||
- ((parser.tokens[parser.tokenIndex].Type != TOKEN_PUNCTUATION &&
- parser.tokens[parser.tokenIndex].Value != ":") &&
- (parser.tokens[parser.tokenIndex].Type != TOKEN_OPERATOR &&
- parser.tokens[parser.tokenIndex].Value != "=")) {
- return nil, fmt.Errorf("expected ':' or '=' after variable name at line %d", includeLine)
- }
- parser.tokenIndex++ // Skip : or =
-
- // Parse the value expression
- varExpr, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // Add to variables map
- variables[varName] = varExpr
-
- // If there's a comma, skip it
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
- parser.tokens[parser.tokenIndex].Value == "," {
- parser.tokenIndex++
- }
-
- // If we see whitespace, skip it
- for parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_TEXT &&
- strings.TrimSpace(parser.tokens[parser.tokenIndex].Value) == "" {
- parser.tokenIndex++
- }
- }
- } else {
- // If there's no opening brace, expect name-value pairs in the old format
- for parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
-
- // Get the variable name
- varName := parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
-
- // Expect '='
- if parser.tokenIndex >= len(parser.tokens) ||
- parser.tokens[parser.tokenIndex].Type != TOKEN_OPERATOR ||
- parser.tokens[parser.tokenIndex].Value != "=" {
- return nil, fmt.Errorf("expected '=' after variable name at line %d", includeLine)
- }
- parser.tokenIndex++
-
- // Parse the value expression
- varExpr, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // Add to variables map
- variables[varName] = varExpr
-
- // If there's a comma, skip it
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
- parser.tokens[parser.tokenIndex].Value == "," {
- parser.tokenIndex++
- } else {
- break
- }
- }
- }
-
- case "ignore":
- // Check for 'missing' keyword
- if parser.tokenIndex >= len(parser.tokens) ||
- parser.tokens[parser.tokenIndex].Type != TOKEN_NAME ||
- parser.tokens[parser.tokenIndex].Value != "missing" {
- return nil, fmt.Errorf("expected 'missing' after 'ignore' at line %d", includeLine)
- }
- parser.tokenIndex++
-
- ignoreMissing = true
-
- case "only":
- onlyContext = true
-
- default:
- return nil, fmt.Errorf("unexpected keyword '%s' in include at line %d", keyword, includeLine)
- }
- }
-
- // Expect the block end token
- if parser.tokenIndex >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
- return nil, fmt.Errorf("expected block end token after include at line %d, found token type %d with value '%s'",
- includeLine,
- parser.tokens[parser.tokenIndex].Type,
- parser.tokens[parser.tokenIndex].Value)
- }
- parser.tokenIndex++
-
- // Create the include node
- includeNode := &IncludeNode{
- template: templateExpr,
- variables: variables,
- ignoreMissing: ignoreMissing,
- only: onlyContext,
- line: includeLine,
- }
-
- return includeNode, nil
-}
-
-func (p *Parser) parseSet(parser *Parser) (Node, error) {
- // Get the line number of the set token
- setLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Get the variable name
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected variable name after set at line %d", setLine)
- }
-
- varName := parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
-
- // Expect '='
- if parser.tokenIndex >= len(parser.tokens) ||
- parser.tokens[parser.tokenIndex].Type != TOKEN_OPERATOR ||
- parser.tokens[parser.tokenIndex].Value != "=" {
- return nil, fmt.Errorf("expected '=' after variable name at line %d", setLine)
- }
- parser.tokenIndex++
-
- // Parse the value expression
- valueExpr, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // For expressions like 5 + 10, we need to parse both sides and make a binary node
- // Check if there's an operator after the first token
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_OPERATOR &&
- parser.tokens[parser.tokenIndex].Value != "=" {
-
- // Get the operator
- operator := parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
-
- // Parse the right side
- rightExpr, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // Create a binary node
- valueExpr = NewBinaryNode(operator, valueExpr, rightExpr, setLine)
- }
-
- // Expect the block end token
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
- return nil, fmt.Errorf("expected block end token after set expression at line %d", setLine)
- }
- parser.tokenIndex++
-
- // Create the set node
- setNode := NewSetNode(varName, valueExpr, setLine)
-
- return setNode, nil
-}
-
-func (p *Parser) parseDo(parser *Parser) (Node, error) {
- // Get the line number for error reporting
- doLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Check for special case: assignment expressions
- // These need to be handled specially since they're not normal expressions
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
-
- varName := parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
-
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_OPERATOR &&
- parser.tokens[parser.tokenIndex].Value == "=" {
-
- // Skip the equals sign
- parser.tokenIndex++
-
- // Parse the right side expression
- expr, err := parser.parseExpression()
- if err != nil {
- return nil, fmt.Errorf("error parsing expression in do assignment at line %d: %w", doLine, err)
- }
-
- // Make sure we have the closing tag
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
- return nil, fmt.Errorf("expecting end of do tag at line %d", doLine)
- }
- parser.tokenIndex++
-
- // Validate the variable name - it should not be a numeric literal
- if _, err := strconv.Atoi(varName); err == nil {
- return nil, fmt.Errorf("invalid variable name %q in do tag assignment at line %d", varName, doLine)
- }
-
- // Create a SetNode instead of DoNode for assignments
- return &SetNode{
- name: varName,
- value: expr,
- line: doLine,
- }, nil
- }
-
- // If it wasn't an assignment, backtrack to parse it as a normal expression
- parser.tokenIndex -= 1
- }
-
- // Parse the expression to be executed
- expr, err := parser.parseExpression()
- if err != nil {
- return nil, fmt.Errorf("error parsing expression in do tag at line %d: %w", doLine, err)
- }
-
- // Make sure we have the closing tag
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END {
- return nil, fmt.Errorf("expecting end of do tag at line %d", doLine)
- }
- parser.tokenIndex++
-
- // Create and return the DoNode
- return NewDoNode(expr, doLine), nil
-}
-
-func (p *Parser) parseMacro(parser *Parser) (Node, error) {
- // Use debug logging if enabled
- if IsDebugEnabled() && debugger.level >= DebugVerbose {
- tokenIndex := parser.tokenIndex - 2
- LogVerbose("Parsing macro, tokens available:")
- for i := 0; i < 10 && tokenIndex+i < len(parser.tokens); i++ {
- token := parser.tokens[tokenIndex+i]
- LogVerbose(" Token %d: Type=%d, Value=%q, Line=%d", i, token.Type, token.Value, token.Line)
- }
- }
-
- // Get the line number of the macro token
- macroLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Get the macro name
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected macro name after macro keyword at line %d", macroLine)
- }
-
- // Special handling for incorrectly tokenized macro declarations
- macroNameRaw := parser.tokens[parser.tokenIndex].Value
- if IsDebugEnabled() && debugger.level >= DebugVerbose {
- LogVerbose("Raw macro name: %s", macroNameRaw)
- }
-
- // Check if the name contains parentheses (incorrectly tokenized)
- if strings.Contains(macroNameRaw, "(") {
- // Extract the actual name before the parenthesis
- parts := strings.SplitN(macroNameRaw, "(", 2)
- if len(parts) == 2 {
- macroName := parts[0]
- paramStr := "(" + parts[1]
- if IsDebugEnabled() && debugger.level >= DebugVerbose {
- LogVerbose("Fixed macro name: %s", macroName)
- LogVerbose("Parameter string: %s", paramStr)
- }
-
- // Parse parameters
- var params []string
- defaults := make(map[string]Node)
-
- // Simple parameter parsing - split by comma
- paramList := strings.TrimRight(paramStr[1:], ")")
- if paramList != "" {
- paramItems := strings.Split(paramList, ",")
-
- for _, param := range paramItems {
- param = strings.TrimSpace(param)
-
- // Check for default value
- if strings.Contains(param, "=") {
- parts := strings.SplitN(param, "=", 2)
- paramName := strings.TrimSpace(parts[0])
- defaultValue := strings.TrimSpace(parts[1])
-
- params = append(params, paramName)
-
- // Handle quoted strings in default values
- if (strings.HasPrefix(defaultValue, "'") && strings.HasSuffix(defaultValue, "'")) ||
- (strings.HasPrefix(defaultValue, "\"") && strings.HasSuffix(defaultValue, "\"")) {
- // Remove quotes
- strValue := defaultValue[1 : len(defaultValue)-1]
- defaults[paramName] = NewLiteralNode(strValue, macroLine)
- } else if defaultValue == "true" {
- defaults[paramName] = NewLiteralNode(true, macroLine)
- } else if defaultValue == "false" {
- defaults[paramName] = NewLiteralNode(false, macroLine)
- } else if i, err := strconv.Atoi(defaultValue); err == nil {
- defaults[paramName] = NewLiteralNode(i, macroLine)
- } else {
- // Fallback to string
- defaults[paramName] = NewLiteralNode(defaultValue, macroLine)
- }
- } else {
- params = append(params, param)
- }
- }
- }
-
- // Skip to the end of the token
- parser.tokenIndex++
-
- // Expect block end
- if parser.tokenIndex >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
- return nil, fmt.Errorf("expected block end token after macro declaration at line %d", macroLine)
- }
- parser.tokenIndex++
-
- // Parse the macro body
- bodyNodes, err := parser.parseOuterTemplate()
- if err != nil {
- return nil, err
- }
-
- // Expect endmacro tag
- if parser.tokenIndex+1 >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START_TRIM) ||
- parser.tokens[parser.tokenIndex+1].Type != TOKEN_NAME ||
- parser.tokens[parser.tokenIndex+1].Value != "endmacro" {
- return nil, fmt.Errorf("missing endmacro tag for macro '%s' at line %d",
- macroName, macroLine)
- }
-
- // Skip {% endmacro %}
- parser.tokenIndex += 2 // Skip {% endmacro
-
- // Expect block end
- if parser.tokenIndex >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
- return nil, fmt.Errorf("expected block end token after endmacro at line %d", parser.tokens[parser.tokenIndex].Line)
- }
- parser.tokenIndex++
-
- // Create the macro node
- if IsDebugEnabled() && debugger.level >= DebugVerbose {
- LogVerbose("Creating MacroNode with %d parameters and %d defaults", len(params), len(defaults))
- }
- return NewMacroNode(macroName, params, defaults, bodyNodes, macroLine), nil
- }
- }
-
- // Regular parsing path
- macroName := parser.tokens[parser.tokenIndex].Value
- if IsDebugEnabled() && debugger.level >= DebugVerbose {
- LogVerbose("Macro name: %s", macroName)
- }
- parser.tokenIndex++
-
- // Expect opening parenthesis for parameters
- if parser.tokenIndex >= len(parser.tokens) ||
- parser.tokens[parser.tokenIndex].Type != TOKEN_PUNCTUATION ||
- parser.tokens[parser.tokenIndex].Value != "(" {
- return nil, fmt.Errorf("expected '(' after macro name at line %d", macroLine)
- }
- parser.tokenIndex++
-
- // Parse parameters
- var params []string
- defaults := make(map[string]Node)
-
- // If we don't have a closing parenthesis immediately, we have parameters
- if parser.tokenIndex < len(parser.tokens) &&
- (parser.tokens[parser.tokenIndex].Type != TOKEN_PUNCTUATION ||
- parser.tokens[parser.tokenIndex].Value != ")") {
-
- for {
- // Get parameter name
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected parameter name at line %d", macroLine)
- }
-
- paramName := parser.tokens[parser.tokenIndex].Value
- fmt.Println("DEBUG: Parameter name:", paramName)
- params = append(params, paramName)
- parser.tokenIndex++
-
- // Check for default value
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_OPERATOR &&
- parser.tokens[parser.tokenIndex].Value == "=" {
- parser.tokenIndex++ // Skip =
-
- // Parse default value expression
- defaultExpr, err := parser.parseExpression()
- if err != nil {
- fmt.Println("DEBUG: Error parsing default value:", err)
- return nil, err
- }
-
- defaults[paramName] = defaultExpr
- }
-
- // Check if we have more parameters
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
- parser.tokens[parser.tokenIndex].Value == "," {
- parser.tokenIndex++ // Skip comma
- continue
- }
-
- break
- }
- }
-
- // Expect closing parenthesis
- if parser.tokenIndex >= len(parser.tokens) ||
- parser.tokens[parser.tokenIndex].Type != TOKEN_PUNCTUATION ||
- parser.tokens[parser.tokenIndex].Value != ")" {
- return nil, fmt.Errorf("expected ')' after macro parameters at line %d", macroLine)
- }
- parser.tokenIndex++
-
- // Expect block end
- if parser.tokenIndex >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
- return nil, fmt.Errorf("expected block end token after macro declaration at line %d", macroLine)
- }
- parser.tokenIndex++
-
- // Parse the macro body
- bodyNodes, err := parser.parseOuterTemplate()
- if err != nil {
- return nil, err
- }
-
- // Expect endmacro tag
- if parser.tokenIndex+1 >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_START_TRIM) ||
- parser.tokens[parser.tokenIndex+1].Type != TOKEN_NAME ||
- parser.tokens[parser.tokenIndex+1].Value != "endmacro" {
- return nil, fmt.Errorf("missing endmacro tag for macro '%s' at line %d",
- macroName, macroLine)
- }
-
- // Skip {% endmacro %}
- parser.tokenIndex += 2 // Skip {% endmacro
-
- // Expect block end
- if parser.tokenIndex >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
- return nil, fmt.Errorf("expected block end token after endmacro at line %d", parser.tokens[parser.tokenIndex].Line)
- }
- parser.tokenIndex++
-
- // Create the macro node
- if IsDebugEnabled() && debugger.level >= DebugVerbose {
- LogVerbose("Creating MacroNode with %d parameters and %d defaults", len(params), len(defaults))
- }
- return NewMacroNode(macroName, params, defaults, bodyNodes, macroLine), nil
-}
-
-func (p *Parser) parseImport(parser *Parser) (Node, error) {
- // Use debug logging if enabled
- if IsDebugEnabled() && debugger.level >= DebugVerbose {
- tokenIndex := parser.tokenIndex - 2
- LogVerbose("Parsing import, tokens available:")
- for i := 0; i < 10 && tokenIndex+i < len(parser.tokens); i++ {
- token := parser.tokens[tokenIndex+i]
- LogVerbose(" Token %d: Type=%d, Value=%q, Line=%d", i, token.Type, token.Value, token.Line)
- }
- }
-
- // Get the line number of the import token
- importLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Check for incorrectly tokenized import syntax
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_NAME &&
- strings.Contains(parser.tokens[parser.tokenIndex].Value, " as ") {
-
- // Special handling for combined syntax like "path.twig as alias"
- parts := strings.SplitN(parser.tokens[parser.tokenIndex].Value, " as ", 2)
- if len(parts) == 2 {
- templatePath := strings.TrimSpace(parts[0])
- alias := strings.TrimSpace(parts[1])
-
- if IsDebugEnabled() && debugger.level >= DebugVerbose {
- LogVerbose("Found combined import syntax: template=%q, alias=%q", templatePath, alias)
- }
-
- // Create an expression node for the template path
- var templateExpr Node
- if strings.HasPrefix(templatePath, "\"") && strings.HasSuffix(templatePath, "\"") {
- // It's already a quoted string
- templateExpr = NewLiteralNode(templatePath[1:len(templatePath)-1], importLine)
- } else {
- // Create a string literal node
- templateExpr = NewLiteralNode(templatePath, importLine)
- }
-
- // Skip to end of token
- parser.tokenIndex++
-
- // Expect block end
- if parser.tokenIndex >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
- return nil, fmt.Errorf("expected block end token after import statement at line %d", importLine)
- }
- parser.tokenIndex++
-
- // Create import node
- return NewImportNode(templateExpr, alias, importLine), nil
- }
- }
-
- // Standard parsing path
- // Get the template to import
- templateExpr, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // Expect 'as' keyword
- if parser.tokenIndex >= len(parser.tokens) ||
- parser.tokens[parser.tokenIndex].Type != TOKEN_NAME ||
- parser.tokens[parser.tokenIndex].Value != "as" {
- return nil, fmt.Errorf("expected 'as' after template path at line %d", importLine)
- }
- parser.tokenIndex++
-
- // Get the alias name
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected identifier after 'as' at line %d", importLine)
- }
-
- alias := parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
-
- // Expect block end
- if parser.tokenIndex >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
- return nil, fmt.Errorf("expected block end token after import statement at line %d", importLine)
- }
- parser.tokenIndex++
-
- // Create import node
- return NewImportNode(templateExpr, alias, importLine), nil
-}
-
-func (p *Parser) parseFrom(parser *Parser) (Node, error) {
- // Get the line number of the from token
- fromLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Get the template to import from
- templateExpr, err := parser.parseExpression()
- if err != nil {
- return nil, err
- }
-
- // Expect 'import' keyword
- if parser.tokenIndex >= len(parser.tokens) ||
- parser.tokens[parser.tokenIndex].Type != TOKEN_NAME ||
- parser.tokens[parser.tokenIndex].Value != "import" {
- return nil, fmt.Errorf("expected 'import' after template path at line %d", fromLine)
- }
- parser.tokenIndex++
-
- // Parse the imported items
- var macros []string
- aliases := make(map[string]string)
-
- // We need at least one macro to import
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected at least one identifier after 'import' at line %d", fromLine)
- }
-
- for parser.tokenIndex < len(parser.tokens) && parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
- // Get macro name
- macroName := parser.tokens[parser.tokenIndex].Value
- parser.tokenIndex++
-
- // Check for 'as' keyword for aliasing
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_NAME &&
- parser.tokens[parser.tokenIndex].Value == "as" {
- parser.tokenIndex++ // Skip 'as'
-
- // Get alias name
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected identifier after 'as' at line %d", fromLine)
- }
-
- aliasName := parser.tokens[parser.tokenIndex].Value
- aliases[macroName] = aliasName
- parser.tokenIndex++
- } else {
- // No alias, just add to macros list
- macros = append(macros, macroName)
- }
-
- // Check for comma to separate items
- if parser.tokenIndex < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
- parser.tokens[parser.tokenIndex].Value == "," {
- parser.tokenIndex++ // Skip comma
-
- // Expect another identifier after comma
- if parser.tokenIndex >= len(parser.tokens) || parser.tokens[parser.tokenIndex].Type != TOKEN_NAME {
- return nil, fmt.Errorf("expected identifier after ',' at line %d", fromLine)
- }
- } else {
- // End of imports
- break
- }
- }
-
- // Expect block end
- if parser.tokenIndex >= len(parser.tokens) ||
- (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
- parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
- return nil, fmt.Errorf("expected block end token after from import statement at line %d", fromLine)
- }
- parser.tokenIndex++
-
- // Create from import node
- return NewFromImportNode(templateExpr, macros, aliases, fromLine), nil
-}
-
// parseEndTag handles closing tags like endif, endfor, endblock, etc.
// These tags should only be encountered inside their respective block parsing methods,
// so if we reach here directly, it's an error.
@@ -2508,129 +1213,3 @@ func (p *Parser) HtmlPreservingTokenize() ([]Token, error) {
func (p *Parser) SetSource(source string) {
p.source = source
}
-
-// parseVerbatim parses a verbatim tag and its content
-func (p *Parser) parseVerbatim(parser *Parser) (Node, error) {
- // Get the line number of the verbatim token
- verbatimLine := parser.tokens[parser.tokenIndex-2].Line
-
- // Expect the block end token
- if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
- return nil, fmt.Errorf("expected block end after verbatim tag at line %d", verbatimLine)
- }
- parser.tokenIndex++
-
- // Collect all content until we find the endverbatim tag
- var contentBuilder strings.Builder
-
- for parser.tokenIndex < len(parser.tokens) {
- token := parser.tokens[parser.tokenIndex]
-
- // Look for the endverbatim tag
- if token.Type == TOKEN_BLOCK_START || token.Type == TOKEN_BLOCK_START_TRIM {
- // Check if this is the endverbatim tag
- if parser.tokenIndex+1 < len(parser.tokens) &&
- parser.tokens[parser.tokenIndex+1].Type == TOKEN_NAME &&
- parser.tokens[parser.tokenIndex+1].Value == "endverbatim" {
-
- // Skip the block start and endverbatim name
- parser.tokenIndex += 2 // Now at the endverbatim token
-
- // Expect the block end token
- if parser.tokenIndex >= len(parser.tokens) || !isBlockEndToken(parser.tokens[parser.tokenIndex].Type) {
- return nil, fmt.Errorf("expected block end after endverbatim at line %d", token.Line)
- }
- parser.tokenIndex++ // Skip the block end token
-
- // Create a verbatim node with the collected content
- return NewVerbatimNode(contentBuilder.String(), verbatimLine), nil
- }
- }
-
- // Add this token's content to our verbatim content
- if token.Type == TOKEN_TEXT {
- contentBuilder.WriteString(token.Value)
- } else if token.Type == TOKEN_VAR_START || token.Type == TOKEN_VAR_START_TRIM {
- // For variable tags, preserve them as literal text
- contentBuilder.WriteString("{{")
-
- // Skip variable start token and process until variable end
- parser.tokenIndex++
-
- // Process tokens until variable end
- for parser.tokenIndex < len(parser.tokens) {
- innerToken := parser.tokens[parser.tokenIndex]
-
- if innerToken.Type == TOKEN_VAR_END || innerToken.Type == TOKEN_VAR_END_TRIM {
- contentBuilder.WriteString("}}")
- break
- } else if innerToken.Type == TOKEN_NAME || innerToken.Type == TOKEN_STRING ||
- innerToken.Type == TOKEN_NUMBER || innerToken.Type == TOKEN_OPERATOR ||
- innerToken.Type == TOKEN_PUNCTUATION {
- contentBuilder.WriteString(innerToken.Value)
- }
-
- parser.tokenIndex++
- }
- } else if token.Type == TOKEN_BLOCK_START || token.Type == TOKEN_BLOCK_START_TRIM {
- // For block tags, preserve them as literal text
- contentBuilder.WriteString("{%")
-
- // Skip block start token and process until block end
- parser.tokenIndex++
-
- // Process tokens until block end
- for parser.tokenIndex < len(parser.tokens) {
- innerToken := parser.tokens[parser.tokenIndex]
-
- if innerToken.Type == TOKEN_BLOCK_END || innerToken.Type == TOKEN_BLOCK_END_TRIM {
- contentBuilder.WriteString("%}")
- break
- } else if innerToken.Type == TOKEN_NAME || innerToken.Type == TOKEN_STRING ||
- innerToken.Type == TOKEN_NUMBER || innerToken.Type == TOKEN_OPERATOR ||
- innerToken.Type == TOKEN_PUNCTUATION {
- // If this is the first TOKEN_NAME in a block, add a space after it
- if innerToken.Type == TOKEN_NAME && parser.tokenIndex > 0 &&
- (parser.tokens[parser.tokenIndex-1].Type == TOKEN_BLOCK_START ||
- parser.tokens[parser.tokenIndex-1].Type == TOKEN_BLOCK_START_TRIM) {
- contentBuilder.WriteString(innerToken.Value + " ")
- } else {
- contentBuilder.WriteString(innerToken.Value)
- }
- }
-
- parser.tokenIndex++
- }
- } else if token.Type == TOKEN_COMMENT_START {
- // For comment tags, preserve them as literal text
- contentBuilder.WriteString("{#")
-
- // Skip comment start token and process until comment end
- parser.tokenIndex++
-
- // Process tokens until comment end
- for parser.tokenIndex < len(parser.tokens) {
- innerToken := parser.tokens[parser.tokenIndex]
-
- if innerToken.Type == TOKEN_COMMENT_END {
- contentBuilder.WriteString("#}")
- break
- } else if innerToken.Type == TOKEN_TEXT {
- contentBuilder.WriteString(innerToken.Value)
- }
-
- parser.tokenIndex++
- }
- }
-
- parser.tokenIndex++
-
- // Check for end of tokens
- if parser.tokenIndex >= len(parser.tokens) {
- return nil, fmt.Errorf("unexpected end of template, unclosed verbatim tag at line %d", verbatimLine)
- }
- }
-
- // If we get here, we never found the endverbatim tag
- return nil, fmt.Errorf("unclosed verbatim tag at line %d", verbatimLine)
-}
diff --git a/parser_include.go b/parser_include.go
new file mode 100644
index 0000000..8071f5d
--- /dev/null
+++ b/parser_include.go
@@ -0,0 +1,177 @@
+package twig
+
+import (
+ "fmt"
+ "strings"
+)
+
+func (p *Parser) parseInclude(parser *Parser) (Node, error) {
+ // Get the line number of the include token
+ includeLine := parser.tokens[parser.tokenIndex-2].Line
+
+ // Get the template expression
+ templateExpr, err := parser.parseExpression()
+ if err != nil {
+ return nil, err
+ }
+
+ // Check for optional parameters
+ var variables map[string]Node
+ var ignoreMissing bool
+ var onlyContext bool
+
+ // Look for 'with', 'ignore missing', or 'only'
+ for parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
+
+ keyword := parser.tokens[parser.tokenIndex].Value
+ parser.tokenIndex++
+
+ switch keyword {
+ case "with":
+ // Parse variables as a hash
+ if variables == nil {
+ variables = make(map[string]Node)
+ }
+
+ // Check for opening brace
+ if parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
+ parser.tokens[parser.tokenIndex].Value == "{" {
+ parser.tokenIndex++ // Skip opening brace
+
+ // Parse key-value pairs
+ for {
+ // If we see a closing brace, we're done
+ if parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
+ parser.tokens[parser.tokenIndex].Value == "}" {
+ parser.tokenIndex++ // Skip closing brace
+ break
+ }
+
+ // Get the variable name - can be string literal or name token
+ var varName string
+ if parser.tokenIndex < len(parser.tokens) && parser.tokens[parser.tokenIndex].Type == TOKEN_STRING {
+ // It's a quoted string key
+ varName = parser.tokens[parser.tokenIndex].Value
+ parser.tokenIndex++
+ } else if parser.tokenIndex < len(parser.tokens) && parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
+ // It's an unquoted key
+ varName = parser.tokens[parser.tokenIndex].Value
+ parser.tokenIndex++
+ } else {
+ return nil, fmt.Errorf("expected variable name or string at line %d", includeLine)
+ }
+
+ // Expect colon or equals
+ if parser.tokenIndex >= len(parser.tokens) ||
+ ((parser.tokens[parser.tokenIndex].Type != TOKEN_PUNCTUATION &&
+ parser.tokens[parser.tokenIndex].Value != ":") &&
+ (parser.tokens[parser.tokenIndex].Type != TOKEN_OPERATOR &&
+ parser.tokens[parser.tokenIndex].Value != "=")) {
+ return nil, fmt.Errorf("expected ':' or '=' after variable name at line %d", includeLine)
+ }
+ parser.tokenIndex++ // Skip : or =
+
+ // Parse the value expression
+ varExpr, err := parser.parseExpression()
+ if err != nil {
+ return nil, err
+ }
+
+ // Add to variables map
+ variables[varName] = varExpr
+
+ // If there's a comma, skip it
+ if parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
+ parser.tokens[parser.tokenIndex].Value == "," {
+ parser.tokenIndex++
+ }
+
+ // If we see whitespace, skip it
+ for parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_TEXT &&
+ strings.TrimSpace(parser.tokens[parser.tokenIndex].Value) == "" {
+ parser.tokenIndex++
+ }
+ }
+ } else {
+ // If there's no opening brace, expect name-value pairs in the old format
+ for parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_NAME {
+
+ // Get the variable name
+ varName := parser.tokens[parser.tokenIndex].Value
+ parser.tokenIndex++
+
+ // Expect '='
+ if parser.tokenIndex >= len(parser.tokens) ||
+ parser.tokens[parser.tokenIndex].Type != TOKEN_OPERATOR ||
+ parser.tokens[parser.tokenIndex].Value != "=" {
+ return nil, fmt.Errorf("expected '=' after variable name at line %d", includeLine)
+ }
+ parser.tokenIndex++
+
+ // Parse the value expression
+ varExpr, err := parser.parseExpression()
+ if err != nil {
+ return nil, err
+ }
+
+ // Add to variables map
+ variables[varName] = varExpr
+
+ // If there's a comma, skip it
+ if parser.tokenIndex < len(parser.tokens) &&
+ parser.tokens[parser.tokenIndex].Type == TOKEN_PUNCTUATION &&
+ parser.tokens[parser.tokenIndex].Value == "," {
+ parser.tokenIndex++
+ } else {
+ break
+ }
+ }
+ }
+
+ case "ignore":
+ // Check for 'missing' keyword
+ if parser.tokenIndex >= len(parser.tokens) ||
+ parser.tokens[parser.tokenIndex].Type != TOKEN_NAME ||
+ parser.tokens[parser.tokenIndex].Value != "missing" {
+ return nil, fmt.Errorf("expected 'missing' after 'ignore' at line %d", includeLine)
+ }
+ parser.tokenIndex++
+
+ ignoreMissing = true
+
+ case "only":
+ onlyContext = true
+
+ default:
+ return nil, fmt.Errorf("unexpected keyword '%s' in include at line %d", keyword, includeLine)
+ }
+ }
+
+ // Expect the block end token
+ if parser.tokenIndex >= len(parser.tokens) ||
+ (parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END &&
+ parser.tokens[parser.tokenIndex].Type != TOKEN_BLOCK_END_TRIM) {
+ return nil, fmt.Errorf("expected block end token after include at line %d, found token type %d with value '%s'",
+ includeLine,
+ parser.tokens[parser.tokenIndex].Type,
+ parser.tokens[parser.tokenIndex].Value)
+ }
+ parser.tokenIndex++
+
+ // Create the include node
+ includeNode := &IncludeNode{
+ template: templateExpr,
+ variables: variables,
+ ignoreMissing: ignoreMissing,
+ only: onlyContext,
+ line: includeLine,
+ }
+
+ return includeNode, nil
+}