github.com/krum110487/go-htaccess@v0.0.0-20240316004156-60641c8e7598/tests/parser/testCaseParser.go

package parser

import (
	"regexp"
	"strings"

	"github.com/alecthomas/participle/v2"
	"github.com/alecthomas/participle/v2/lexer"
)

// Directive patterns recognised inside # comments. Each pattern matches a
// case-insensitive keyword at the start of the comment text.
var collectOps string = `(?i)^[\s\t]*(collection-name)(?:[ \t])`
var collectClean string = `(?i)^[\s\t]*(collection-cleanup)[ \t]*`
var testOps string = `(?i)^[\s\t]*(test-name|collection-name)(?:[ \t])`
var setupOps string = `(?i)^[\s\t]*(setup-makeFile)(?:[ \t])`
var cleanupOps string = `(?i)^[\s\t]*(cleanup-delFile)(?:[ \t])`
var requestOps string = `(?i)^[\s\t]*(request-url|request-header|request-cookie|request-body)[ \t]*`
var responseOps string = `(?i)^[\s\t]*(response-status|response-header|response-cookie|response-body)[ \t]*`

// testLexer is a stateful lexer: the Root state handles ## multi-line comment
// blocks, ordinary code lines and line endings, and pushes into the Comment
// state on a leading #, where the test directive keywords are tokenised.
var testLexer = lexer.MustStateful(lexer.Rules{
	"Root": {
		{"MLComment", `[^\n\r]*##.*([\n\r]\s*##.*)*`, nil},
		{"EOL", `[\r\n]`, nil},
		{"SHash", `[\s\t]*#`, lexer.Push("Comment")},
		{"Code", `[^\n\r]*`, nil},
		{"WS", `[\s\t]+`, nil},
	},
	"Comment": {
		{"EOL", `[\r\n]`, lexer.Pop()},
		{"CollectName", collectOps, nil},
		{"CollectClean", collectClean, nil},
		{"TestName", testOps, nil},
		{"SetupOps", setupOps, nil},
		{"CleanupOps", cleanupOps, nil},
		{"RequestOps", requestOps, nil},
		{"ResponseOps", responseOps, nil},
		{"CommentText", `[^\n\r]+`, nil},
		{"WS", `[\s\t]+`, nil},
	},
})

// TestCollection is the root of the grammar: an optional collection name,
// collection-level setup steps, the individual tests and collection cleanup.
type TestCollection struct {
	FolderPath string
	TEST_URL   string
	TEST_PATH  string
	Name       *string        `parser:"(SHash CollectName @CommentText)*"`
	Setup      []*SetupInfo   `parser:"@@*"`
	TestList   []*TestInfo    `parser:"(@@ | SHash CommentText)*"`
	Cleanup    []*CleanupInfo `parser:"(SHash CollectClean @@ | SHash CommentText)*"`
}

// TestInfo is a single named test together with its setup, request,
// response and cleanup directives, in any order.
type TestInfo struct {
	Name     string          `parser:"SHash TestName @CommentText"`
	Setup    []*SetupInfo    `parser:"( @@ "`
	Request  []*RequestInfo  `parser:" | @@ "`
	Response []*ResponseInfo `parser:" | @@ "`
	Cleanup  []*CleanupInfo  `parser:" | @@ | SHash CommentText)*"`
}

// SetupInfo is a setup directive: the operation keyword, its inline
// parameter and an optional ## multi-line value.
type SetupInfo struct {
	Operation      string `parser:"SHash @SetupOps"`
	Parameter      string `parser:"@CommentText*"`
	MultiLineValue string `parser:"@MLComment*"`
}

// CleanupInfo is a cleanup directive with the same shape as SetupInfo.
type CleanupInfo struct {
	Operation      string `parser:"SHash @CleanupOps"`
	Parameter      string `parser:"@CommentText*"`
	MultiLineValue string `parser:"@MLComment*"`
}

// RequestInfo describes one aspect of the request to send (URL, header,
// cookie or body).
type RequestInfo struct {
	Operation      string `parser:"SHash @RequestOps"`
	Parameter      string `parser:"@CommentText*"`
	MultiLineValue string `parser:"@MLComment*"`
}

// ResponseInfo describes one expectation on the response (status, header,
// cookie or body).
type ResponseInfo struct {
	Operation      string `parser:"SHash @ResponseOps"`
	Parameter      string `parser:"@CommentText*"`
	MultiLineValue string `parser:"@MLComment*"`
}

// TestParser parses annotated test source files into a TestCollection.
// EOL, whitespace and plain code lines are elided; token values are trimmed
// and multi-line ## comments are cleaned via the Map options below.
var TestParser = participle.MustBuild[TestCollection](
	participle.Lexer(testLexer),
	participle.CaseInsensitive("SetupOps", "CleanupOps", "RequestOps", "CollectName"),
	participle.CaseInsensitive("TestName", "ResponseOps", "CollectClean"),
	participle.Elide("EOL", "WS", "Code"),
	trim("CommentText", "SetupOps", "CleanupOps", "RequestOps", "TestName", "ResponseOps"),
	trim("CollectClean", "CollectName"),
	cleanMulticomments("MLComment"),
)

// trim returns a participle option that strips leading and trailing
// whitespace from the values of the given token types ("String" by default).
func trim(types ...string) participle.Option {
	if len(types) == 0 {
		types = []string{"String"}
	}
	return participle.Map(func(t lexer.Token) (lexer.Token, error) {
		value := strings.TrimSpace(t.Value)
		t.Value = value
		return t, nil
	}, types...)
}

// cleanMulticomments returns a participle option that removes the leading
// ## markers (and their indentation) from each line of a multi-line comment
// token and drops blank lines, keeping only the comment content.
func cleanMulticomments(types ...string) participle.Option {
	if len(types) == 0 {
		types = []string{"String"}
	}
	return participle.Map(func(t lexer.Token) (lexer.Token, error) {
		lines := strings.Split(t.Value, "\n")

		newLines := []string{}
		re := regexp.MustCompile(`^[\t\s]*##[ ]*`)
		for _, line := range lines {
			if strings.TrimSpace(line) != "" {
				newLines = append(newLines, re.ReplaceAllString(line, ""))
			}
		}

		t.Value = strings.Join(newLines, "\n")
		return t, nil
	}, types...)
}
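
A minimal usage sketch, not part of the original file: it feeds a small hand-written annotated source through TestParser and prints either the parsed result or the parse error. The import path is taken from the file header above; the main function, the sample src text and its directive spelling are illustrative assumptions rather than test data from the repository.

package main

import (
	"fmt"

	"github.com/krum110487/go-htaccess/tests/parser"
)

func main() {
	// Hypothetical annotated source; the keywords mirror the directive
	// regexes in testCaseParser.go but are invented for this sketch.
	src := "# collection-name example collection\n" +
		"# test-name redirect old path\n" +
		"# request-url /old-path\n" +
		"# response-status 301\n" +
		"RewriteRule ^old-path$ /new-path [R=301,L]\n"

	// ParseString is the standard participle v2 entry point; it returns the
	// populated TestCollection or an error describing where parsing stopped.
	collection, err := parser.TestParser.ParseString("", src)
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	if collection.Name != nil {
		fmt.Println("collection:", *collection.Name)
	}
	fmt.Println("tests:", len(collection.TestList))
}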