123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267 |
- package pongo2
- import (
- "bytes"
- "fmt"
- "strings"
- )
// INode is the interface implemented by every node of the parsed
// template tree. Execute renders the node into the given buffer using
// the provided execution context, returning a *Error on failure.
type INode interface {
	Execute(*ExecutionContext, *bytes.Buffer) *Error
}
// IEvaluator is implemented by nodes that produce a value when
// evaluated (expressions), in addition to being executable nodes.
type IEvaluator interface {
	INode
	// GetPositionToken returns the token carrying this node's
	// source position (used for error reporting).
	GetPositionToken() *Token
	// Evaluate computes the node's value in the given context.
	Evaluate(*ExecutionContext) (*Value, *Error)
	// FilterApplied reports whether the filter with the given name
	// has been applied to this evaluator.
	FilterApplied(name string) bool
}
// The parser provides you a comprehensive and easy tool to
// work with the template document and arguments provided by
// the user for your custom tag.
//
// The parser works on a token list which will be provided by pongo2.
// A token is a unit you can work with. Tokens are either of type identifier,
// string, number, keyword, HTML or symbol.
//
// (See Token's documentation for more about tokens)
type Parser struct {
	name       string   // template/file name, used in error messages
	idx        int      // index of the current (next unconsumed) token
	tokens     []*Token // full token stream being parsed
	last_token *Token   // last token of the stream, kept for EOF error positions
	// if the parser parses a template document, here will be
	// a reference to it (needed to access the template through Tags)
	template *Template
}
- // Creates a new parser to parse tokens.
- // Used inside pongo2 to parse documents and to provide an easy-to-use
- // parser for tag authors
- func newParser(name string, tokens []*Token, template *Template) *Parser {
- p := &Parser{
- name: name,
- tokens: tokens,
- template: template,
- }
- if len(tokens) > 0 {
- p.last_token = tokens[len(tokens)-1]
- }
- return p
- }
- // Consume one token. It will be gone forever.
- func (p *Parser) Consume() {
- p.ConsumeN(1)
- }
- // Consume N tokens. They will be gone forever.
- func (p *Parser) ConsumeN(count int) {
- p.idx += count
- }
- // Returns the current token.
- func (p *Parser) Current() *Token {
- return p.Get(p.idx)
- }
- // Returns the CURRENT token if the given type matches.
- // Consumes this token on success.
- func (p *Parser) MatchType(typ TokenType) *Token {
- if t := p.PeekType(typ); t != nil {
- p.Consume()
- return t
- }
- return nil
- }
- // Returns the CURRENT token if the given type AND value matches.
- // Consumes this token on success.
- func (p *Parser) Match(typ TokenType, val string) *Token {
- if t := p.Peek(typ, val); t != nil {
- p.Consume()
- return t
- }
- return nil
- }
- // Returns the CURRENT token if the given type AND *one* of
- // the given values matches.
- // Consumes this token on success.
- func (p *Parser) MatchOne(typ TokenType, vals ...string) *Token {
- for _, val := range vals {
- if t := p.Peek(typ, val); t != nil {
- p.Consume()
- return t
- }
- }
- return nil
- }
- // Returns the CURRENT token if the given type matches.
- // It DOES NOT consume the token.
- func (p *Parser) PeekType(typ TokenType) *Token {
- return p.PeekTypeN(0, typ)
- }
- // Returns the CURRENT token if the given type AND value matches.
- // It DOES NOT consume the token.
- func (p *Parser) Peek(typ TokenType, val string) *Token {
- return p.PeekN(0, typ, val)
- }
- // Returns the CURRENT token if the given type AND *one* of
- // the given values matches.
- // It DOES NOT consume the token.
- func (p *Parser) PeekOne(typ TokenType, vals ...string) *Token {
- for _, v := range vals {
- t := p.PeekN(0, typ, v)
- if t != nil {
- return t
- }
- }
- return nil
- }
- // Returns the tokens[current position + shift] token if the
- // given type AND value matches for that token.
- // DOES NOT consume the token.
- func (p *Parser) PeekN(shift int, typ TokenType, val string) *Token {
- t := p.Get(p.idx + shift)
- if t != nil {
- if t.Typ == typ && t.Val == val {
- return t
- }
- }
- return nil
- }
- // Returns the tokens[current position + shift] token if the given type matches.
- // DOES NOT consume the token for that token.
- func (p *Parser) PeekTypeN(shift int, typ TokenType) *Token {
- t := p.Get(p.idx + shift)
- if t != nil {
- if t.Typ == typ {
- return t
- }
- }
- return nil
- }
- // Returns the UNCONSUMED token count.
- func (p *Parser) Remaining() int {
- return len(p.tokens) - p.idx
- }
- // Returns the total token count.
- func (p *Parser) Count() int {
- return len(p.tokens)
- }
- // Returns tokens[i] or NIL (if i >= len(tokens))
- func (p *Parser) Get(i int) *Token {
- if i < len(p.tokens) {
- return p.tokens[i]
- }
- return nil
- }
- // Returns tokens[current-position + shift] or NIL
- // (if (current-position + i) >= len(tokens))
- func (p *Parser) GetR(shift int) *Token {
- i := p.idx + shift
- return p.Get(i)
- }
- // Produces a nice error message and returns an error-object.
- // The 'token'-argument is optional. If provided, it will take
- // the token's position information. If not provided, it will
- // automatically use the CURRENT token's position information.
- func (p *Parser) Error(msg string, token *Token) *Error {
- if token == nil {
- // Set current token
- token = p.Current()
- if token == nil {
- // Set to last token
- if len(p.tokens) > 0 {
- token = p.tokens[len(p.tokens)-1]
- }
- }
- }
- var line, col int
- if token != nil {
- line = token.Line
- col = token.Col
- }
- return &Error{
- Template: p.template,
- Filename: p.name,
- Sender: "parser",
- Line: line,
- Column: col,
- Token: token,
- ErrorMsg: msg,
- }
- }
// Wraps all nodes between starting tag and "{% endtag %}" and provides
// one simple interface to execute the wrapped nodes.
// It returns a parser to process provided arguments to the tag.
//
// On success it returns the wrapper (with Endtag set to the matched
// name), a fresh Parser over the tokens found between the end-tag name
// and its closing "%}", and nil. It returns a *Error if EOF is reached
// before one of the given end-tag names (or before "%}" once an
// end-tag has been opened).
func (p *Parser) WrapUntilTag(names ...string) (*NodeWrapper, *Parser, *Error) {
	wrapper := &NodeWrapper{}
	tagArgs := make([]*Token, 0)
	for p.Remaining() > 0 {
		// New tag, check whether we have to stop wrapping here
		if p.Peek(TokenSymbol, "{%") != nil {
			tag_ident := p.PeekTypeN(1, TokenIdentifier)
			if tag_ident != nil {
				// We've found a (!) end-tag
				// Check whether the identifier is one of the requested names.
				found := false
				for _, n := range names {
					if tag_ident.Val == n {
						found = true
						break
					}
				}
				// We only process the tag if we've found an end tag
				if found {
					// Okay, endtag found.
					p.ConsumeN(2) // '{%' tagname
					// Collect every token up to (and consuming) the
					// closing "%}" as arguments for the tag parser.
					for {
						if p.Match(TokenSymbol, "%}") != nil {
							// Okay, end the wrapping here
							wrapper.Endtag = tag_ident.Val
							return wrapper, newParser(p.template.name, tagArgs, p.template), nil
						} else {
							t := p.Current()
							p.Consume()
							if t == nil {
								// Stream ended inside the end-tag.
								return nil, nil, p.Error("Unexpected EOF.", p.last_token)
							}
							tagArgs = append(tagArgs, t)
						}
					}
				}
			}
		}
		// Otherwise process next element to be wrapped
		node, err := p.parseDocElement()
		if err != nil {
			return nil, nil, err
		}
		wrapper.nodes = append(wrapper.nodes, node)
	}
	// Ran out of tokens without seeing any of the requested end-tags.
	return nil, nil, p.Error(fmt.Sprintf("Unexpected EOF, expected tag %s.", strings.Join(names, " or ")),
		p.last_token)
}
|