I have been reviewing design patterns recently,
reading Tan Yongde's 《设计模式就该这样学》 (Design Patterns Should Be Learned This Way).
This series of notes practices each pattern in Go; this installment covers the Interpreter pattern.
The Interpreter pattern: given a language, define a representation for its grammar, together with an interpreter that uses this representation to interpret sentences in the language. The Interpreter pattern parses input according to a prescribed grammar and belongs to the behavioral patterns. (from Tan Yongde, 《设计模式就该这样学》)
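Before the SQL example, a minimal sketch of the roles the pattern describes may help: an abstract expression, a terminal expression, and a non-terminal expression that composes other expressions. The names (IExpression, tVar, tAnd) are illustrative only; they are not from the book and not part of the code below.

package main

import "fmt"

// IExpression is the abstract expression: every node of the syntax tree can interpret itself.
type IExpression interface {
    Interpret(ctx map[string]bool) bool
}

// tVar is a terminal expression: it looks a variable up in the context.
type tVar struct{ name string }

func (me *tVar) Interpret(ctx map[string]bool) bool { return ctx[me.name] }

// tAnd is a non-terminal expression: it composes two sub-expressions.
type tAnd struct{ left, right IExpression }

func (me *tAnd) Interpret(ctx map[string]bool) bool {
    return me.left.Interpret(ctx) && me.right.Interpret(ctx)
}

func main() {
    // the "sentence" a AND b, built by hand instead of by a parser
    sentence := &tAnd{&tVar{"a"}, &tVar{"b"}}
    fmt.Println(sentence.Interpret(map[string]bool{"a": true, "b": false})) // false
}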
The grammar of the SQL subset implemented in this example:

SQL            : select + FIELD_LIST + from + TABLE_NAME + (where + BOOL_EXPRESSION)?
FIELD_LIST     : * | COLUMN_LIST
COLUMN_LIST    : COLUMN_NAME + (, + COLUMN_NAME)*
COLUMN_NAME    : IDENTIFIER
IDENTIFIER     : [_a-zA-Z] + [_a-zA-Z0-9]*
TABLE_NAME     : IDENTIFIER
BOOL_EXPRESSION: STRING_FIELD = STRING_LITERAL
               | STRING_FIELD <> STRING_LITERAL
               | STRING_FIELD like STRING_LITERAL
               | STRING_FIELD not like STRING_LITERAL
               | INT_FIELD = INT_LITERAL
               | INT_FIELD <> INT_LITERAL
               | INT_FIELD > INT_LITERAL
               | INT_FIELD >= INT_LITERAL
               | INT_FIELD < INT_LITERAL
               | INT_FIELD <= INT_LITERAL
               | ( + BOOL_EXPRESSION + )
               | BOOL_EXPRESSION and BOOL_EXPRESSION
               | BOOL_EXPRESSION or BOOL_EXPRESSION
STRING_FIELD   : IDENTIFIER
INT_FIELD      : IDENTIFIER
STRING_LITERAL : \' + [^']* + \'
INT_LITERAL    : [1-9] + [0-9]*
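As an illustration (my own reading of the grammar, not from the book), the query used in the test below decomposes as follows:

select * from sale_order where (city='广州' or city='深圳') and quantity>10
    FIELD_LIST      -> *
    TABLE_NAME      -> sale_order
    BOOL_EXPRESSION -> BOOL_EXPRESSION and BOOL_EXPRESSION
        left  -> ( BOOL_EXPRESSION or BOOL_EXPRESSION )   i.e. city='广州' or city='深圳'
        right -> INT_FIELD > INT_LITERAL                   i.e. quantity > 10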
interpreter_pattern_test.go, simulating saving sale orders and querying them with SQL
package behavioral_patterns

import (
    "learning/gooop/behavioral_patterns/interpreter"
    "testing"
)

func Test_Interpreter(t *testing.T) {
    service := interpreter.MockSaleOrderService
    service.Save(interpreter.NewSaleOrder(1, "张三", "广州", "电视", 10))
    service.Save(interpreter.NewSaleOrder(11, "张三三", "广州", "电视", 11))
    service.Save(interpreter.NewSaleOrder(2, "李四", "深圳", "冰箱", 20))
    service.Save(interpreter.NewSaleOrder(22, "李四四", "深圳", "冰箱", 21))
    service.Save(interpreter.NewSaleOrder(3, "王五", "东莞", "空调", 30))
    service.Save(interpreter.NewSaleOrder(33, "王五五", "东莞", "空调", 31))

    db := interpreter.MockDatabase
    e, rows := db.Query("select * from sale_order where (city='广州' or city='深圳') and quantity>10")
    if e != nil {
        t.Error(e)
    } else {
        for _, it := range rows {
            t.Logf("%s", it)
        }
    }
}
$ go test -v interpreter_pattern_test.go
=== RUN   Test_Interpreter
    interpreter_pattern_test.go:24: id=2, customer=李四, city=深圳, product=冰箱, quantity=20
    interpreter_pattern_test.go:24: id=22, customer=李四四, city=深圳, product=冰箱, quantity=21
    interpreter_pattern_test.go:24: id=11, customer=张三三, city=广州, product=电视, quantity=11
--- PASS: Test_Interpreter (0.00s)
PASS
ok      command-line-arguments  0.002s
The database interface
package interpreter

type IDatabase interface {
    Register(table IDataTable)
    Query(sql string) (error, []IDataRow)
}
The data table interface
package interpreter

type IDataTable interface {
    Name() string
    Filter(filter IRowFilter) []IDataRow
}
The data row interface
package interpreter

type IDataRow interface {
    GetField(field string) (error, IDataField)
}
The data field interface, with string and int field implementations
package interpreter

import (
    "errors"
)

type IDataField interface {
    Name() string
    DataType() DataTypes
    GetString() (error, string)
    GetInt() (error, int)
}

type DataTypes int

const StringDataType DataTypes = 1
const IntDataType DataTypes = 2

type tIntField struct {
    name  string
    value int
}

func newIntField(name string, value int) *tIntField {
    return &tIntField{
        name, value,
    }
}

func (me *tIntField) Name() string {
    return me.name
}

func (me *tIntField) DataType() DataTypes {
    return IntDataType
}

func (me *tIntField) GetString() (error, string) {
    return errors.New("not implemented"), ""
}

func (me *tIntField) GetInt() (error, int) {
    return nil, me.value
}

type tStringField struct {
    name  string
    value string
}

func newStringField(name string, value string) *tStringField {
    return &tStringField{
        name, value,
    }
}

func (me *tStringField) Name() string {
    return me.name
}

func (me *tStringField) DataType() DataTypes {
    return StringDataType
}

func (me *tStringField) GetString() (error, string) {
    return nil, me.value
}

func (me *tStringField) GetInt() (error, int) {
    return errors.New("not implemented"), 0
}
The row filter interface
package interpreter

type IRowFilter interface {
    Filter(row IDataRow) bool
}
A row filter that statically accepts or rejects every row
package interpreter

type tEmptyRowFilter struct {
    accept bool
}

func newEmptyRowFilter(accept bool) IRowFilter {
    return &tEmptyRowFilter{
        accept: accept,
    }
}

func (me *tEmptyRowFilter) Filter(row IDataRow) bool {
    return me.accept
}
A row filter backed by a boolean expression
package interpreter

type tExpressionRowFilter struct {
    expression IBoolExpression
}

func newExpressionRowFilter(e IBoolExpression) IRowFilter {
    return &tExpressionRowFilter{
        expression: e,
    }
}

func (me *tExpressionRowFilter) Filter(row IDataRow) bool {
    return me.expression.Eval(row)
}
The SQL token enumeration
package tokens

type Tokens string

const Select Tokens = "select"
const Star Tokens = "*"
const Comma Tokens = ","
const From Tokens = "from"
const Where Tokens = "where"
const Identifier Tokens = "identifier"
const LB Tokens = "("
const RB Tokens = ")"
const And Tokens = "and"
const OR Tokens = "or"
const Equal Tokens = "="
const NotEqual Tokens = "<>"
const Greater Tokens = ">"
const GreaterEqual Tokens = ">="
const Less Tokens = "<"
const LessEqual Tokens = "<="
const Like Tokens = "like"
const NotLike Tokens = "not like"
const StringLiteral Tokens = "string_literal"
const IntLiteral Tokens = "int_literal"
The syntax tree node kind enumeration
package nodes

type Nodes int

const TokenNode Nodes = 1
const ExpressionNode Nodes = 2
A lexing helper for character classification
package interpreter

type tChars struct {
}

func newCharsLib() *tChars {
    return &tChars{}
}

func (me *tChars) IsSpace(it rune) bool {
    switch it {
    case ' ':
        return true
    case '\t':
        return true
    case '\r':
        return true
    case '\n':
        return true
    }
    return false
}

func (me *tChars) Is09(it rune) bool {
    return it >= '0' && it <= '9'
}

func (me *tChars) Is19(it rune) bool {
    return it >= '1' && it <= '9'
}

func (me *tChars) IsLetter(it rune) bool {
    return (it >= 'a' && it <= 'z') || (it >= 'A' && it <= 'Z')
}

func (me *tChars) IsUnderscore(it rune) bool {
    return it == '_'
}

func (me *tChars) IsLB(it rune) bool {
    return it == '('
}

func (me *tChars) IsRB(it rune) bool {
    return it == ')'
}

func (me *tChars) IsChar(it rune, args ...rune) bool {
    for _, v := range args {
        if v == it {
            return true
        }
    }
    return false
}

var chars = newCharsLib()
The SQL parser interface and parse result
package interpreter

type ISQLParser interface {
    Parse(sql string) (error, *ParseResult)
}

type ParseResult struct {
    Fields    []string
    Table     string
    RowFilter IRowFilter
}

func newParseResult() *ParseResult {
    return &ParseResult{
        make([]string, 0),
        "",
        nil,
    }
}
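For the test query at the top of this post, Parse is expected to produce Fields = ["*"], Table = "sale_order", and a RowFilter wrapping the WHERE expression; when the WHERE clause is absent, the parser falls back to a filter that accepts every row (see ParseWherePart below).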
The lexer for the SQL subset
package interpreter

import (
    "fmt"
    "learning/gooop/behavioral_patterns/interpreter/tokens"
)

type tLexer struct {
    chars  []rune
    count  int
    pos    int
    tokens []*tTokenNode
}

func newLexer(sql string) *tLexer {
    chars := []rune(sql)
    return &tLexer{
        chars:  chars,
        count:  len(chars),
        pos:    0,
        tokens: make([]*tTokenNode, 0),
    }
}

func (me *tLexer) push(it *tTokenNode) {
    me.tokens = append(me.tokens, it)
}

func (me *tLexer) Parse() (error, []*tTokenNode) {
    // keywords must be followed by a space
    fnMatchingWord := func(t tokens.Tokens) bool {
        pattern := string(t)
        from := me.pos
        if me.MatchingConst(pattern) && me.MatchingSpace() {
            me.push(newTokenNode(t, string(t), from, me.pos-1))
            return true
        }
        // restore the position when the keyword did not fully match
        me.pos = from
        return false
    }
    // operators need no trailing space
    fnMatchingOP := func(t tokens.Tokens) bool {
        pattern := string(t)
        from := me.pos
        if me.MatchingConst(pattern) {
            me.push(newTokenNode(t, string(t), from, me.pos-1))
            return true
        }
        return false
    }

    for {
        // eof
        if me.IsEof() {
            return nil, me.tokens
        }

        me.SkipSpace()
        from := me.pos

        // select, from, where
        if fnMatchingWord(tokens.Select) || fnMatchingWord(tokens.From) || fnMatchingWord(tokens.Where) {
            continue
        }

        // star, comma
        if fnMatchingWord(tokens.Star) || fnMatchingWord(tokens.Comma) {
            continue
        }

        // and, or
        if fnMatchingWord(tokens.And) {
            continue
        } else if fnMatchingWord(tokens.OR) {
            continue
        }

        // like, not like
        if fnMatchingWord(tokens.Like) {
            continue
        } else if fnMatchingWord(tokens.NotLike) {
            continue
        }

        // (, )
        if fnMatchingOP(tokens.LB) {
            continue
        } else if fnMatchingOP(tokens.RB) {
            continue
        }

        // =, <>, >, >=, <, <=
        if fnMatchingOP(tokens.Equal) {
            continue
        } else if fnMatchingOP(tokens.NotEqual) {
            continue
        } else if fnMatchingOP(tokens.GreaterEqual) {
            continue
        } else if fnMatchingOP(tokens.Greater) {
            continue
        } else if fnMatchingOP(tokens.LessEqual) {
            continue
        } else if fnMatchingOP(tokens.Less) {
            continue
        }

        // identifier
        b, v := me.MatchingIdentifier()
        if b {
            me.push(newTokenNode(tokens.Identifier, v, from, from+len(v)))
            continue
        }

        // string literal
        b, v = me.MatchingString()
        if b {
            me.push(newTokenNode(tokens.StringLiteral, v, from, from+len(v)))
            continue
        }

        // int literal
        b, v = me.MatchingInt()
        if b {
            me.push(newTokenNode(tokens.IntLiteral, v, from, from+len(v)))
            continue
        }

        // unknown
        return fmt.Errorf("unknown token at %v", from), nil
    }
}

func (me *tLexer) IsEof() bool {
    return me.pos >= me.count
}

func (me *tLexer) SkipSpace() {
    for {
        if me.pos >= me.count {
            break
        }
        if chars.IsSpace(me.Char()) {
            me.pos++
        } else {
            break
        }
    }
}

func (me *tLexer) MatchingConst(s string) bool {
    pattern := []rune(s)
    for i, it := range pattern {
        n := me.pos + i
        if n >= me.count {
            return false
        }
        if me.chars[n] != it {
            return false
        }
    }
    me.pos += len(pattern)
    return true
}

func (me *tLexer) Char() rune {
    if me.pos >= me.count {
        return 0
    }
    return me.chars[me.pos]
}

func (me *tLexer) MatchingSpace() bool {
    if chars.IsSpace(me.Char()) {
        me.pos++
        return true
    }
    return false
}

func (me *tLexer) MatchingString() (bool, string) {
    mark := me.pos
    if me.Char() != '\'' {
        return false, ""
    }
    i := mark
    for {
        i++
        if i >= me.count {
            // unterminated string literal
            return false, ""
        }
        if me.chars[i] == '\'' {
            me.pos = i + 1
            // the quotes are kept; they are stripped later by newStringFieldExpression
            return true, string(me.chars[mark:me.pos])
        }
    }
}

func (me *tLexer) MatchingIdentifier() (bool, string) {
    mark := me.pos
    c := me.Char()
    if !(chars.IsUnderscore(c) || chars.IsLetter(c)) {
        return false, ""
    }
    i := mark
    for {
        i++
        if i >= me.count {
            // identifier ends at end of input
            me.pos = i
            return true, string(me.chars[mark:i])
        }
        if i > mark+30 {
            // identifier too long
            return false, ""
        }
        it := me.chars[i]
        if chars.IsLetter(it) || chars.Is09(it) || chars.IsUnderscore(it) {
            continue
        }
        me.pos = i
        return true, string(me.chars[mark:i])
    }
}

func (me *tLexer) MatchingInt() (bool, string) {
    if me.Char() == '0' {
        me.pos++
        return true, "0"
    }
    mark := me.pos
    if !chars.Is19(me.Char()) {
        return false, ""
    }
    i := mark
    for {
        i++
        if i >= me.count {
            me.pos = me.count
            return true, string(me.chars[mark:])
        }
        if i > mark+10 {
            return false, ""
        }
        it := me.chars[i]
        if chars.Is09(it) {
            continue
        }
        if chars.IsSpace(it) {
            // consume the digits; the space is left for SkipSpace
            me.pos = i
            return true, string(me.chars[mark:i])
        }
        return false, ""
    }
}
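To make the lexer's output concrete, this is the token stream I would expect for the test query (my own trace; string literals keep their quotes, which newStringFieldExpression strips later):

Select, Star, From, Identifier(sale_order), Where,
LB, Identifier(city), Equal, StringLiteral('广州'), OR,
Identifier(city), Equal, StringLiteral('深圳'), RB,
And, Identifier(quantity), Greater, IntLiteral(10)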
The parser implementation
package interpreter

import (
    "errors"
    "fmt"
    "learning/gooop/behavioral_patterns/interpreter/tokens"
    "strconv"
)

type tSQLParser struct {
    result *ParseResult
}

func newSQLParser() ISQLParser {
    return &tSQLParser{
        newParseResult(),
    }
}

func (me *tSQLParser) Parse(sql string) (error, *ParseResult) {
    lexer := newLexer(sql)
    e, tks := lexer.Parse()
    if e != nil {
        return e, nil
    }
    queue := newTokenQueue(tks)

    // select (* | field-list)
    e = me.ParseSelectPart(queue)
    if e != nil {
        return e, nil
    }

    // from
    e = me.ParseFromPart(queue)
    if e != nil {
        return e, nil
    }

    // where + bool_expression
    e = me.ParseWherePart(queue)
    if e != nil {
        return e, nil
    }

    // eof
    if !queue.IsEmpty() {
        _, t := queue.Poll()
        return fmt.Errorf("expecting EOF at %v", t.from), nil
    }

    return nil, me.result
}

func (me *tSQLParser) ParseSelectPart(queue *tTokenQueue) error {
    b, v := queue.Poll()
    if !b {
        return errors.New("expecting SELECT keyword")
    }
    if v.token != tokens.Select {
        return fmt.Errorf("expecting SELECT keyword but found '%s'", v.value)
    }

    fields := make([]string, 0)
    b, v = queue.Peek()
    if !b {
        return errors.New("unexpected EOF")
    }
    switch v.token {
    case tokens.Star:
        queue.Poll()
        fields = append(fields, v.value)
    case tokens.Identifier:
        queue.Poll()
        fields = append(fields, v.value)
    default:
        return fmt.Errorf("expecting column name but found '%s'", v.value)
    }

    // additional columns: , COLUMN_NAME
    for {
        b, v := queue.Peek()
        if !b {
            break
        }
        if v.token != tokens.Comma {
            break
        }
        queue.Poll()

        b, v = queue.Peek()
        if !b || v.token != tokens.Identifier {
            return errors.New("expecting column name after ','")
        }
        queue.Poll()
        fields = append(fields, v.value)
    }

    if len(fields) > 0 {
        me.result.Fields = fields
        return nil
    }
    return errors.New("expecting column names")
}

func (me *tSQLParser) ParseFromPart(queue *tTokenQueue) error {
    b, v1 := queue.Poll()
    if !b {
        return errors.New("expecting 'from', but eof")
    }
    if v1.token != tokens.From {
        return fmt.Errorf("expecting 'from' at %v", v1.from)
    }

    b, v2 := queue.Poll()
    if !b {
        return errors.New("expecting table name, but eof")
    }
    if v2.token == tokens.Identifier {
        me.result.Table = v2.value
        return nil
    }
    return fmt.Errorf("expecting table name at %v", v2.from)
}

func (me *tSQLParser) ParseWherePart(queue *tTokenQueue) error {
    if queue.IsEmpty() {
        // no where clause: accept every row
        me.result.RowFilter = newEmptyRowFilter(true)
        return nil
    }

    _, v1 := queue.Poll()
    if v1.token != tokens.Where {
        return fmt.Errorf("expecting 'where' keyword at %v", v1.from)
    }

    stack := newArrayStack()
    e, expression := me.ParseWhereExpression(queue, stack)
    if e != nil {
        return e
    }

    me.result.RowFilter = newExpressionRowFilter(expression)
    return nil
}

func (me *tSQLParser) ParseWhereExpression(queue *tTokenQueue, stack *tArrayStack) (error, IBoolExpression) {
    for {
        if queue.IsEmpty() {
            break
        }
        _, t := queue.Poll()
        switch t.token {
        case tokens.LB, tokens.Identifier,
            tokens.Equal, tokens.NotEqual, tokens.Like, tokens.NotLike,
            tokens.Greater, tokens.GreaterEqual, tokens.Less, tokens.LessEqual,
            tokens.And, tokens.OR:
            // columns and operators are pushed; they are resolved when a literal or ')' arrives
            stack.Push(t)

        case tokens.StringLiteral:
            // field op string
            b, v := stack.Pop()
            if !b || !v.IsToken() {
                return fmt.Errorf("expecting operator before %s", t.value), nil
            }
            op := v.(*tTokenNode)
            if !me.TokenIn(op.token, tokens.Equal, tokens.NotEqual, tokens.Like, tokens.NotLike) {
                return fmt.Errorf("expecting string operator before %s", t.value), nil
            }

            b, v = stack.Pop()
            if !b || !v.IsToken() {
                return fmt.Errorf("expecting column name before %v", op.from), nil
            }
            field := v.(*tTokenNode)
            if field.token != tokens.Identifier {
                return fmt.Errorf("expecting column name at %v", field.from), nil
            }

            exp := newStringFieldExpression(op.token, field.value, t.value)
            e := me.PushExpression(exp, stack)
            if e != nil {
                return e, nil
            }

        case tokens.IntLiteral:
            // field op int
            b, v := stack.Pop()
            if !b || !v.IsToken() {
                return fmt.Errorf("expecting operator before %s", t.value), nil
            }
            op := v.(*tTokenNode)
            if !me.TokenIn(op.token, tokens.Equal, tokens.NotEqual, tokens.Greater, tokens.GreaterEqual, tokens.Less, tokens.LessEqual) {
                return fmt.Errorf("expecting int operator before %s", t.value), nil
            }

            b, v = stack.Pop()
            if !b || !v.IsToken() {
                return fmt.Errorf("expecting column name before %v", op.from), nil
            }
            field := v.(*tTokenNode)
            if field.token != tokens.Identifier {
                return fmt.Errorf("expecting column name at %v", field.from), nil
            }

            i, _ := strconv.Atoi(t.value)
            exp := newIntFieldExpression(op.token, field.value, i)
            e := me.PushExpression(exp, stack)
            if e != nil {
                return e, nil
            }

        case tokens.RB:
            // ')' closes '( expression )'
            b, v := stack.Pop()
            if !b || !v.IsExpression() {
                return fmt.Errorf("expecting expression before %v", t.from), nil
            }
            expression := v.(*tExpressionNode).Expression

            b, v = stack.Pop()
            if !b || !v.IsToken() {
                return fmt.Errorf("expecting '(' at %v", t.from), nil
            }
            lb := v.(*tTokenNode)
            if lb.token != tokens.LB {
                return fmt.Errorf("expecting '(' at %v", t.from), nil
            }

            e := me.PushExpression(expression, stack)
            if e != nil {
                return e, nil
            }
        }
    }

    if stack.Size() != 1 {
        return errors.New("invalid expression"), nil
    }
    ok, node := stack.Peek()
    if !ok || !node.IsExpression() {
        return errors.New("invalid expression"), nil
    }
    return nil, node.(*tExpressionNode).Expression
}

func (me *tSQLParser) TokenIn(t tokens.Tokens, args ...tokens.Tokens) bool {
    for _, it := range args {
        if it == t {
            return true
        }
    }
    return false
}

func (me *tSQLParser) PushExpression(exp IBoolExpression, stack *tArrayStack) error {
    b, n := stack.Peek()
    if !b {
        // empty stack: just push the expression
        stack.Push(newExpressionNode(exp))
        return nil
    }
    if !n.IsToken() {
        return errors.New("expecting and/or/(")
    }
    t := n.(*tTokenNode)
    if !me.TokenIn(t.token, tokens.And, tokens.OR, tokens.LB) {
        return errors.New("expecting and/or/(")
    }
    if me.TokenIn(t.token, tokens.And, tokens.OR) {
        // fold "expression and/or expression" into a logic expression
        stack.Pop()
        b, n = stack.Pop()
        if !b || !n.IsExpression() {
            return errors.New("expecting bool expression")
        }
        e := n.(*tExpressionNode)
        return me.PushExpression(newLogicNode(t.token, e.Expression, exp), stack)
    } else {
        // '(' stays on the stack until the matching ')'
        stack.Push(newExpressionNode(exp))
        return nil
    }
}
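The WHERE clause is parsed with a single stack: tokens are pushed as they arrive, and whenever a literal or ')' shows up, ParseWhereExpression pops the operator and column, builds a field expression, and hands it to PushExpression, which folds any pending and/or. My own rough trace for the test WHERE clause:

(city='广州' or city='深圳') and quantity>10

token         stack after processing
(             (
city =        ( city =
'广州'        ( E1                     E1 = city='广州'
or            ( E1 or
city =        ( E1 or city =
'深圳'        ( E3                     E2 = city='深圳', E3 = E1 or E2
)             E3                       '(' is popped, E3 is re-pushed
and           E3 and
quantity >    E3 and quantity >
10            E5                       E4 = quantity>10, E5 = E3 and E4  (the final expression)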
The token queue
package interpreter

type tTokenQueue struct {
    items []*tTokenNode
    size  int
    next  int
}

func newTokenQueue(nodes []*tTokenNode) *tTokenQueue {
    return &tTokenQueue{
        nodes,
        len(nodes),
        0,
    }
}

func (me *tTokenQueue) Poll() (bool, *tTokenNode) {
    b, v := me.Peek()
    if b {
        me.next++
    }
    return b, v
}

func (me *tTokenQueue) Peek() (bool, *tTokenNode) {
    if me.next >= me.size {
        return false, nil
    }
    it := me.items[me.next]
    return true, it
}

func (me *tTokenQueue) IsEmpty() bool {
    return me.next >= me.size
}
An array-based LIFO stack, and its token/expression node types
package interpreter

import "learning/gooop/behavioral_patterns/interpreter/tokens"

type tArrayStack struct {
    items []iStackNode
}

type iStackNode interface {
    IsToken() bool
    IsExpression() bool
}

func newArrayStack() *tArrayStack {
    return &tArrayStack{
        make([]iStackNode, 0),
    }
}

func (me *tArrayStack) Push(node iStackNode) {
    me.items = append(me.items, node)
}

func (me *tArrayStack) Size() int {
    return len(me.items)
}

func (me *tArrayStack) Peek() (bool, iStackNode) {
    if me.Size() <= 0 {
        return false, nil
    }
    return true, me.items[me.Size()-1]
}

func (me *tArrayStack) Pop() (bool, iStackNode) {
    if me.Size() <= 0 {
        return false, nil
    }
    it := me.items[me.Size()-1]
    me.items = me.items[:me.Size()-1]
    return true, it
}

type tTokenNode struct {
    token tokens.Tokens
    value string
    from  int
    to    int
}

func newTokenNode(t tokens.Tokens, v string, from int, to int) *tTokenNode {
    return &tTokenNode{
        t, v, from, to,
    }
}

func (me *tTokenNode) IsToken() bool {
    return true
}

func (me *tTokenNode) IsExpression() bool {
    return false
}

type tExpressionNode struct {
    Expression IBoolExpression
}

func newExpressionNode(e IBoolExpression) *tExpressionNode {
    return &tExpressionNode{
        e,
    }
}

func (me *tExpressionNode) IsToken() bool {
    return false
}

func (me *tExpressionNode) IsExpression() bool {
    return true
}
The boolean expression interface
package interpreter

type IBoolExpression interface {
    Eval(row IDataRow) bool
}
A boolean expression that compares a field against a literal
package interpreter

import (
    "errors"
    "fmt"
    "learning/gooop/behavioral_patterns/interpreter/tokens"
    "strings"
)

type tFieldExpression struct {
    op            tokens.Tokens
    field         string
    stringLiteral string
    intLiteral    int
}

func newStringFieldExpression(op tokens.Tokens, field string, s string) *tFieldExpression {
    return &tFieldExpression{
        op, field, strings.Trim(s, "'"), 0,
    }
}

func newIntFieldExpression(op tokens.Tokens, field string, value int) *tFieldExpression {
    return &tFieldExpression{
        op, field, "", value,
    }
}

func (me *tFieldExpression) Eval(row IDataRow) bool {
    e, fld := row.GetField(me.field)
    if e != nil {
        panic(e)
    }
    switch fld.DataType() {
    case StringDataType:
        e, v := fld.GetString()
        if e != nil {
            panic(e)
        }
        return me.EvalString(v)
    case IntDataType:
        e, v := fld.GetInt()
        if e != nil {
            panic(e)
        }
        return me.EvalInt(v)
    default:
        panic(errors.New("unknown data type"))
    }
}

func (me *tFieldExpression) EvalString(value string) bool {
    switch me.op {
    case tokens.Equal:
        return value == me.stringLiteral
    case tokens.NotEqual:
        return value != me.stringLiteral
    case tokens.Like, tokens.NotLike:
        // a leading/trailing '%' in the pattern acts as a wildcard
        pattern := me.stringLiteral
        p := strings.HasPrefix(pattern, "%")
        s := strings.HasSuffix(pattern, "%")
        trimmed := strings.Trim(pattern, "%")
        like := false
        if p && s {
            like = strings.Contains(value, trimmed)
        } else if p {
            like = strings.HasSuffix(value, trimmed)
        } else if s {
            like = strings.HasPrefix(value, trimmed)
        } else {
            like = value == trimmed
        }
        if me.op == tokens.Like {
            return like
        }
        return !like
    default:
        panic(fmt.Errorf("unsupported string operation: %s", me.op))
    }
}

func (me *tFieldExpression) EvalInt(value int) bool {
    switch me.op {
    case tokens.Equal:
        return value == me.intLiteral
    case tokens.NotEqual:
        return value != me.intLiteral
    case tokens.Greater:
        return value > me.intLiteral
    case tokens.GreaterEqual:
        return value >= me.intLiteral
    case tokens.Less:
        return value < me.intLiteral
    case tokens.LessEqual:
        return value <= me.intLiteral
    default:
        panic(fmt.Errorf("unsupported int operation: %s", me.op))
    }
}
A boolean expression that combines two sub-expressions with AND/OR
package interpreter

import (
    "fmt"
    "learning/gooop/behavioral_patterns/interpreter/tokens"
)

type tLogicExpression struct {
    op    tokens.Tokens
    left  IBoolExpression
    right IBoolExpression
}

func newLogicNode(op tokens.Tokens, left IBoolExpression, right IBoolExpression) *tLogicExpression {
    return &tLogicExpression{
        op, left, right,
    }
}

func (me *tLogicExpression) Eval(row IDataRow) bool {
    switch me.op {
    case tokens.And:
        return me.left.Eval(row) && me.right.Eval(row)
    case tokens.OR:
        return me.left.Eval(row) || me.right.Eval(row)
    default:
        panic(fmt.Sprintf("unsupported bool operation: %s", me.op))
    }
}
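The filter the parser builds for the test query is therefore equivalent to composing these expressions by hand. Below is a sketch of such a test; it assumes it lives in a test file inside the interpreter package (the constructors are unexported) and uses the SaleOrder entity defined in the next section; the test name is mine:

package interpreter

import (
    "testing"

    "learning/gooop/behavioral_patterns/interpreter/tokens"
)

// hand-built equivalent of: (city='广州' or city='深圳') and quantity>10
func Test_HandBuiltExpression(t *testing.T) {
    filter := newExpressionRowFilter(
        newLogicNode(tokens.And,
            newLogicNode(tokens.OR,
                newStringFieldExpression(tokens.Equal, "city", "'广州'"),
                newStringFieldExpression(tokens.Equal, "city", "'深圳'")),
            newIntFieldExpression(tokens.Greater, "quantity", 10)))

    row := NewSaleOrder(2, "李四", "深圳", "冰箱", 20)
    if !filter.Filter(row) {
        t.Error("expected the row to match: city is 深圳 and quantity 20 > 10")
    }
}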
The sale order entity
package interpreter

import (
    "errors"
    "fmt"
    "strings"
)

type SaleOrder struct {
    id       *tIntField
    customer *tStringField
    city     *tStringField
    product  *tStringField
    quantity *tIntField
}

func NewSaleOrder(id int, customer string, city string, product string, quantity int) *SaleOrder {
    return &SaleOrder{
        id:       newIntField("id", id),
        customer: newStringField("customer", customer),
        city:     newStringField("city", city),
        product:  newStringField("product", product),
        quantity: newIntField("quantity", quantity),
    }
}

func (me *SaleOrder) ID() int {
    return me.id.value
}

func (me *SaleOrder) Customer() string {
    return me.customer.value
}

func (me *SaleOrder) City() string {
    return me.city.value
}

func (me *SaleOrder) Product() string {
    return me.product.value
}

func (me *SaleOrder) Quantity() int {
    return me.quantity.value
}

func (me *SaleOrder) GetField(field string) (error, IDataField) {
    if strings.EqualFold(field, "ID") {
        return nil, me.id
    }
    if strings.EqualFold(field, "Customer") {
        return nil, me.customer
    }
    if strings.EqualFold(field, "City") {
        return nil, me.city
    }
    if strings.EqualFold(field, "Product") {
        return nil, me.product
    }
    if strings.EqualFold(field, "Quantity") {
        return nil, me.quantity
    }
    return errors.New("no such field"), nil
}

func (me *SaleOrder) String() string {
    return fmt.Sprintf("id=%v, customer=%v, city=%v, product=%v, quantity=%v",
        me.ID(), me.Customer(), me.City(), me.Product(), me.Quantity())
}
The sale order service interface, which embeds the IDataTable interface
package interpreter

type ISaleOrderService interface {
    IDataTable
    Save(order *SaleOrder)
}
A mock database service implementing the IDatabase interface
package interpreter

import "errors"

type tMockDatabase struct {
    tables map[string]IDataTable
}

func newMockDatabase() IDatabase {
    return &tMockDatabase{
        tables: make(map[string]IDataTable),
    }
}

func (me *tMockDatabase) Register(table IDataTable) {
    me.tables[table.Name()] = table
}

func (me *tMockDatabase) Query(sql string) (error, []IDataRow) {
    parser := newSQLParser()
    e, result := parser.Parse(sql)
    if e != nil {
        return e, nil
    }

    table, ok := me.tables[result.Table]
    if !ok {
        return errors.New("table not found"), nil
    }

    return nil, table.Filter(result.RowFilter)
}

var MockDatabase = newMockDatabase()
A mock sale order service implementing the ISaleOrderService interface
package interpreter

type tMockSaleOrderService struct {
    items map[int]*SaleOrder
}

func (me *tMockSaleOrderService) Save(it *SaleOrder) {
    me.items[it.ID()] = it
}

func (me *tMockSaleOrderService) Name() string {
    return "sale_order"
}

func (me *tMockSaleOrderService) Filter(filter IRowFilter) []IDataRow {
    rows := make([]IDataRow, 0)
    for _, it := range me.items {
        if filter.Filter(it) {
            rows = append(rows, it)
        }
    }
    return rows
}

func newMockSaleOrderService() ISaleOrderService {
    it := &tMockSaleOrderService{
        items: make(map[int]*SaleOrder),
    }
    MockDatabase.Register(it)
    return it
}

var MockSaleOrderService = newMockSaleOrderService()
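Because tMockDatabase only depends on IDataTable and IDataRow, another entity becomes queryable by implementing those two interfaces, with no change to the parser. The Product and tProductService names below are hypothetical, purely to illustrate the extension point:

package interpreter

import "errors"

// Product is a hypothetical second entity; it becomes a query row by implementing IDataRow.
type Product struct {
    id   *tIntField
    name *tStringField
}

func NewProduct(id int, name string) *Product {
    return &Product{
        id:   newIntField("id", id),
        name: newStringField("name", name),
    }
}

func (me *Product) GetField(field string) (error, IDataField) {
    switch field {
    case "id":
        return nil, me.id
    case "name":
        return nil, me.name
    }
    return errors.New("no such field"), nil
}

// tProductService implements IDataTable so the database can route "from product" to it.
type tProductService struct {
    items []*Product
}

func (me *tProductService) Name() string {
    return "product"
}

func (me *tProductService) Filter(filter IRowFilter) []IDataRow {
    rows := make([]IDataRow, 0)
    for _, it := range me.items {
        if filter.Filter(it) {
            rows = append(rows, it)
        }
    }
    return rows
}

Registering it with MockDatabase.Register, e.g. MockDatabase.Register(&tProductService{items: []*Product{NewProduct(1, "电视")}}), would let a query such as select * from product where name='电视' run through the same interpreter.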
Advantages of the Interpreter pattern
(1) Because the grammar is represented by many classes, changing a grammar rule only requires modifying the corresponding non-terminal expression, and extending the grammar only requires adding new non-terminal classes.
(2) It is easy to add new ways of interpreting expressions.
(3) The grammar handled by the Interpreter pattern should be fairly simple and easy to implement; an overly complex grammar is not a good fit for this pattern.
Drawbacks of the Interpreter pattern
(1) Every grammar rule produces its own non-terminal expression class. When the grammar is complex, this creates a large number of interpreter classes, leading to class explosion and making the system harder to maintain.
(2) The Interpreter pattern relies on recursive calls: each non-terminal expression only cares about the expressions related to itself, and each expression needs to know its final result, so the result of the complete expression is obtained through recursive calls from the bottom up. When the complete expression is deeply nested, interpretation becomes slow and errors are hard to debug, because the recursion goes too deep.
(from Tan Yongde, 《设计模式就该这样学》)
(end)