Index: lucene/queryparser/aqp-macros.xml
===================================================================
--- lucene/queryparser/aqp-macros.xml (revision 0)
+++ lucene/queryparser/aqp-macros.xml (revision 0)
@@ -0,0 +1,258 @@
+      Regenerating: @{grammar-name}
+      Output: @{parser-dir}
+
+      Generating DOT: @{grammar-name}
+      Query: @{query}
+      Rule: @{rule}
+
+      Generating TREE: @{grammar-name}
+      Query: @{query}
+      Rule: @{rule}
+
+      Running GUNIT: @{grammar-name}
+
+      The html with charts is at: @{build-dir}/@{grammar-name}.html
+
+      if (parser.getNumberOfSyntaxErrors() > 0) {
+        throw new Exception("The parser reported a syntax error, antlrqueryparser hates errors!");
+      }
+      astTree = (AqpCommonTree) returnValue.getTree();
+      return astTree.toQueryNodeTree();
+    } catch (RecognitionException e) {
+      throw new QueryNodeParseException(new MessageImpl(query + " "
+          + parser.getErrorMessage(e, parser.getTokenNames())));
+    } catch (Exception e) {
+      Message message = new MessageImpl(
+          QueryParserMessages.INVALID_SYNTAX_CANNOT_PARSE, query,
+          e.getMessage());
+      QueryNodeParseException ee = new QueryNodeParseException(e);
+      ee.setQuery(query);
+      ee.setNonLocalizedMessage(message);
+      throw ee;
+    } catch (Error e) {
+      Message message = new MessageImpl(
+          QueryParserMessages.INVALID_SYNTAX_CANNOT_PARSE, query,
+          e.getMessage());
+      QueryNodeParseException ee = new QueryNodeParseException(e);
+      ee.setQuery(query);
+      ee.setNonLocalizedMessage(message);
+      throw ee;
+    }
+  }
+}
+]]>
Index: lucene/queryparser/ivy.xml
===================================================================
--- lucene/queryparser/ivy.xml (revision 1484512)
+++ lucene/queryparser/ivy.xml (working copy)
@@ -18,4 +18,17 @@
 -->
Index: lucene/queryparser/lib/antlr-runtime-3.4.jar
===================================================================
--- lucene/queryparser/lib/antlr-runtime-3.4.jar (revision 0)
+++ lucene/queryparser/lib/antlr-runtime-3.4.jar (revision 0)
@@ -0,0 +1,846 @@
[binary content of antlr-runtime-3.4.jar not shown]
%org/antlr/runtime/tree/BaseTree.classPK +L>0+ org/antlr/runtime/tree/TreeRewriter$2.classPK +L>+(5 )org/antlr/runtime/tree/TreeIterator.classPK +L>@i} C!'org/antlr/runtime/tree/TreeWizard.classPK +L>4l3}!org/antlr/runtime/tree/BufferedTreeNodeStream.classPK +L>R 8 +,:0org/antlr/runtime/tree/CommonErrorNode.classPK +L>%ܴU.,5org/antlr/runtime/tree/TreePatternParser.classPK +L>^*"(l<org/antlr/runtime/tree/TreeVisitor.classPK +L>Q/y?org/antlr/runtime/tree/TreeWizard$Visitor.classPK +L>K_qQ)lAorg/antlr/runtime/tree/TreeFilter$1.classPK +L>9 +Corg/antlr/runtime/tree/TreeRewriter$1.classPK +L>  Forg/antlr/runtime/DFA.classPK +L>Xi,/Morg/antlr/runtime/NoViableAltException.classPK +L>PMETA-INF/maven/PK +L>4PMETA-INF/maven/org.antlr/PK +L>'kPMETA-INF/maven/org.antlr/antlr-runtime/PK +>}4 .$PMETA-INF/maven/org.antlr/antlr-runtime/pom.xmlPK +L> Ǒfi5TMETA-INF/maven/org.antlr/antlr-runtime/pom.propertiesPK[,U \ No newline at end of file Property changes on: lucene/queryparser/lib/antlr-runtime-3.4.jar ___________________________________________________________________ Added: svn:mime-type + application/octet-stream Index: lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGMultiField.java =================================================================== --- lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGMultiField.java (revision 0) +++ lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGMultiField.java (revision 0) @@ -0,0 +1,392 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import java.io.Reader; +import java.io.StringReader; +import java.util.HashMap; +import java.util.Map; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.TextField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.standard.QueryParserUtil; +import org.apache.lucene.queryparser.flexible.standard.TestQPHelper; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.Operator; +import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpQueryParserUtil; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.util.IOUtils; + +/** + * This test case is a copy of the core Lucene query parser test, it was adapted + * to use new QueryParserHelper instead of the old query parser. + * + * Tests QueryParser. + */ +public class TestAqpSLGMultiField extends AqpTestAbstractCase { + + /** + * test stop words parsing for both the non static form, and for the + * corresponding static form (qtxt, fields[]). + */ + public void testStopwordsParsing() throws Exception { + assertStopQueryEquals("one", "b:one t:one"); + assertStopQueryEquals("one stop", "b:one t:one"); + assertStopQueryEquals("one (stop)", "b:one t:one"); + assertStopQueryEquals("one ((stop))", "b:one t:one"); + assertStopQueryEquals("stop", ""); + assertStopQueryEquals("(stop)", ""); + assertStopQueryEquals("((stop))", ""); + } + + // verify parsing of query using a stopping analyzer + private void assertStopQueryEquals(String qtxt, String expectedRes) + throws Exception { + String[] fields = { "b", "t" }; + Occur occur[] = { Occur.SHOULD, Occur.SHOULD }; + TestQPHelper.QPTestAnalyzer a = new TestQPHelper.QPTestAnalyzer(); + AqpQueryParser mfqp = getParser(); + mfqp.setMultiFields(fields); + mfqp.setAnalyzer(a); + + Query q = mfqp.parse(qtxt, null); + assertEquals(expectedRes, q.toString()); + + q = QueryParserUtil.parse(qtxt, fields, occur, a); + assertEquals(expectedRes, q.toString()); + } + + public void testSimple() throws Exception { + String[] fields = { "b", "t" }; + AqpQueryParser mfqp = getParser(); + mfqp.setMultiFields(fields); + mfqp.setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)); + + Query q = mfqp.parse("one", null); + assertEquals("b:one t:one", q.toString()); + + q = mfqp.parse("one two", null); + assertEquals("(b:one t:one) (b:two t:two)", q.toString()); + + q = mfqp.parse("+one +two", null); + assertEquals("+(b:one t:one) +(b:two t:two)", q.toString()); + + q = mfqp.parse("+one -two -three", null); + assertEquals("+(b:one t:one) -(b:two t:two) -(b:three t:three)", + q.toString()); + + q = mfqp.parse("one^2 two", null); + assertEquals("((b:one t:one)^2.0) (b:two t:two)", q.toString()); + + mfqp.setAllowSlowFuzzy(true); + q = mfqp.parse("one~ two", null); + assertEquals("(b:one~0.5 t:one~0.5) (b:two t:two)", q.toString()); + + q = mfqp.parse("one~0.8 two^2", null); + 
assertEquals("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.toString()); + + q = mfqp.parse("one* two*", null); + assertEquals("(b:one* t:one*) (b:two* t:two*)", q.toString()); + + q = mfqp.parse("[a TO c] two", null); + assertEquals("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.toString()); + + q = mfqp.parse("w?ldcard", null); + assertEquals("b:w?ldcard t:w?ldcard", q.toString()); + + q = mfqp.parse("\"foo bar\"", null); + assertEquals("b:\"foo bar\" t:\"foo bar\"", q.toString()); + + q = mfqp.parse("\"aa bb cc\" \"dd ee\"", null); + assertEquals("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", + q.toString()); + + q = mfqp.parse("\"foo bar\"~4", null); + assertEquals("b:\"foo bar\"~4 t:\"foo bar\"~4", q.toString()); + + // LUCENE-1213: QueryParser was ignoring slop when phrase + // had a field. + q = mfqp.parse("b:\"foo bar\"~4", null); + assertEquals("b:\"foo bar\"~4", q.toString()); + + // make sure that terms which have a field are not touched: + q = mfqp.parse("one f:two", null); + assertEquals("(b:one t:one) f:two", q.toString()); + + // AND mode: + mfqp.setDefaultOperator(Operator.AND); + q = mfqp.parse("one two", null); + assertEquals("+(b:one t:one) +(b:two t:two)", q.toString()); + q = mfqp.parse("\"aa bb cc\" \"dd ee\"", null); + assertEquals("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", + q.toString()); + + } + + public void testBoostsSimple() throws Exception { + Map boosts = new HashMap(); + boosts.put("b", Float.valueOf(5)); + boosts.put("t", Float.valueOf(10)); + String[] fields = { "b", "t" }; + AqpQueryParser mfqp = getParser(); + mfqp.setMultiFields(fields); + mfqp.setFieldsBoost(boosts); + mfqp.setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)); + + // Check for simple + Query q = mfqp.parse("one", null); + assertEquals("b:one^5.0 t:one^10.0", q.toString()); + + // Check for AND + q = mfqp.parse("one AND two", null); + assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", + q.toString()); + + // Check for OR + q = mfqp.parse("one OR two", null); + assertEquals("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.toString()); + + // Check for AND and a field + q = mfqp.parse("one AND two AND foo:test", null); + assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", + q.toString()); + + q = mfqp.parse("one^3 AND two^4", null); + assertEquals("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)", + q.toString()); + } + + public void testStaticMethod1() throws Exception { + String[] fields = { "b", "t" }; + String[] queries = { "one", "two" }; + AqpQueryParser qp = getParser(); + qp.setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)); + Query q = AqpQueryParserUtil.parse(qp, queries, fields); + assertEquals("b:one t:two", q.toString()); + + String[] queries2 = { "+one", "+two" }; + q = AqpQueryParserUtil.parse(qp, queries2, fields); + assertEquals("b:one t:two", q.toString()); + + String[] queries3 = { "one", "+two" }; + q = AqpQueryParserUtil.parse(qp, queries3, fields); + assertEquals("b:one t:two", q.toString()); + + String[] queries4 = { "one +more", "+two" }; + q = AqpQueryParserUtil.parse(qp, queries4, fields); + assertEquals("(b:one +b:more) t:two", q.toString()); + + String[] queries5 = { "blah" }; + try { + q = AqpQueryParserUtil.parse(qp, queries5, fields); + fail(); + } catch (IllegalArgumentException e) { + // expected exception, array length differs + } + + // check also with stop words for this static form (qtxts[], fields[]). 
+ TestQPHelper.QPTestAnalyzer stopA = new TestQPHelper.QPTestAnalyzer(); + qp.setAnalyzer(stopA); + + String[] queries6 = { "((+stop))", "+((stop))" }; + q = AqpQueryParserUtil.parse(qp, queries6, fields); + assertEquals("", q.toString()); + + String[] queries7 = { "one ((+stop)) +more", "+((stop)) +two" }; + q = AqpQueryParserUtil.parse(qp, queries7, fields); + // well, aqp is better in removing the parens from top-level, + // so this is the correct result (the AqpQueryUtils has fundamental flaw + // anyway) + // original was: (b:one +b:more) (+t:two) + assertEquals("(b:one +b:more) t:two", q.toString()); + + } + + public void testStaticMethod2() throws QueryNodeException { + String[] fields = { "b", "t" }; + BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, + BooleanClause.Occur.MUST_NOT }; + Query q = QueryParserUtil.parse("one", fields, flags, new StandardAnalyzer( + TEST_VERSION_CURRENT)); + assertEquals("+b:one -t:one", q.toString()); + + q = QueryParserUtil.parse("one two", fields, flags, new StandardAnalyzer( + TEST_VERSION_CURRENT)); + assertEquals("+(b:one b:two) -(t:one t:two)", q.toString()); + + try { + BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST }; + q = QueryParserUtil.parse("blah", fields, flags2, new StandardAnalyzer( + TEST_VERSION_CURRENT)); + fail(); + } catch (IllegalArgumentException e) { + // expected exception, array length differs + } + } + + public void testStaticMethod2Old() throws Exception { + String[] fields = { "b", "t" }; + BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, + BooleanClause.Occur.MUST_NOT }; + AqpQueryParser parser = getParser(); + parser.setMultiFields(fields); + parser.setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)); + + Query q = QueryParserUtil.parse("one", fields, flags, new StandardAnalyzer( + TEST_VERSION_CURRENT));// , fields, flags, new + // StandardAnalyzer()); + assertEquals("+b:one -t:one", q.toString()); + + q = QueryParserUtil.parse("one two", fields, flags, new StandardAnalyzer( + TEST_VERSION_CURRENT)); + assertEquals("+(b:one b:two) -(t:one t:two)", q.toString()); + + try { + BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST }; + q = QueryParserUtil.parse("blah", fields, flags2, new StandardAnalyzer( + TEST_VERSION_CURRENT)); + fail(); + } catch (IllegalArgumentException e) { + // expected exception, array length differs + } + } + + public void testStaticMethod3() throws QueryNodeException { + String[] queries = { "one", "two", "three" }; + String[] fields = { "f1", "f2", "f3" }; + BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, + BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD }; + Query q = QueryParserUtil.parse(queries, fields, flags, + new StandardAnalyzer(TEST_VERSION_CURRENT)); + assertEquals("+f1:one -f2:two f3:three", q.toString()); + + try { + BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST }; + q = QueryParserUtil.parse(queries, fields, flags2, new StandardAnalyzer( + TEST_VERSION_CURRENT)); + fail(); + } catch (IllegalArgumentException e) { + // expected exception, array length differs + } + } + + public void testStaticMethod3Old() throws QueryNodeException { + String[] queries = { "one", "two" }; + String[] fields = { "b", "t" }; + BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, + BooleanClause.Occur.MUST_NOT }; + Query q = QueryParserUtil.parse(queries, fields, flags, + new StandardAnalyzer(TEST_VERSION_CURRENT)); + assertEquals("+b:one -t:two", q.toString()); + + try { + BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST 
}; + q = QueryParserUtil.parse(queries, fields, flags2, new StandardAnalyzer( + TEST_VERSION_CURRENT)); + fail(); + } catch (IllegalArgumentException e) { + // expected exception, array length differs + } + } + + public void testAnalyzerReturningNull() throws Exception { + String[] fields = new String[] { "f1", "f2", "f3" }; + AqpQueryParser parser = getParser(); + parser.setMultiFields(fields); + parser.setAnalyzer(new AnalyzerReturningNull()); + + Query q = parser.parse("bla AND blo", null); + assertEquals("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.toString()); + // the following queries are not affected as their terms are not + // analyzed anyway: + q = parser.parse("bla*", null); + assertEquals("f1:bla* f2:bla* f3:bla*", q.toString()); + q = parser.parse("bla~", null); + assertEquals("f1:bla~1 f2:bla~1 f3:bla~1", q.toString()); + q = parser.parse("[a TO c]", null); + assertEquals("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.toString()); + } + + public void testStopWordSearching() throws Exception { + Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); + Directory ramDir = new RAMDirectory(); + IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig( + TEST_VERSION_CURRENT, analyzer)); + Document doc = new Document(); + doc.add(newField("body", "blah the footest blah", TextField.TYPE_NOT_STORED)); + iw.addDocument(doc); + iw.close(); + + AqpQueryParser mfqp = getParser(); + + mfqp.setMultiFields(new String[] { "body" }); + mfqp.setAnalyzer(analyzer); + mfqp.setDefaultOperator(Operator.AND); + Query q = mfqp.parse("the footest", null); + IndexSearcher is = new IndexSearcher(DirectoryReader.open(ramDir)); + ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs; + assertEquals(1, hits.length); + ramDir.close(); + } + + /** + * Return empty tokens for field "f1". + */ + private static class AnalyzerReturningNull extends Analyzer { + MockAnalyzer stdAnalyzer = new MockAnalyzer(random()); + + public AnalyzerReturningNull() { + super(new PerFieldReuseStrategy()); + } + + @Override + protected Reader initReader(String fieldName, Reader reader) { + if ("f1".equals(fieldName)) { + // we don't use the reader, so close it: + IOUtils.closeWhileHandlingException(reader); + // return empty reader, so MockTokenizer returns no tokens: + return new StringReader(""); + } else { + return super.initReader(fieldName, reader); + } + } + + @Override + public TokenStreamComponents createComponents(String fieldName, Reader reader) { + return stdAnalyzer.createComponents(fieldName, reader); + } + } + + // Uniquely for Junit 3 + public static junit.framework.Test suite() { + return new junit.framework.JUnit4TestAdapter(TestAqpSLGMultiField.class); + } + +} Index: lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGStandardTest.java =================================================================== --- lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGStandardTest.java (revision 0) +++ lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGStandardTest.java (revision 0) @@ -0,0 +1,1080 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.io.IOException; +import java.io.Reader; +import java.text.DateFormat; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.TimeZone; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.core.StopAnalyzer; +import org.apache.lucene.analysis.core.StopFilter; +import org.apache.lucene.analysis.core.WhitespaceAnalyzer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.document.DateTools; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.TextField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Term; +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.builders.QueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.FuzzyQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorPipeline; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.Operator; +import org.apache.lucene.queryparser.flexible.standard.nodes.WildcardQueryNode; +import org.apache.lucene.queryparser.flexible.standard.processors.GroupQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser; +import org.apache.lucene.queryparser.flexible.aqp.AqpSyntaxParser; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardLuceneParser; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MultiPhraseQuery; +import org.apache.lucene.search.MultiTermQuery; +import 
org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.search.WildcardQuery; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.util.automaton.BasicAutomata; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; + +/** + * This test case is a copy of the core Lucene query parser test, it was adapted + * to use new QueryParserHelper instead of the old query parser. + * + * TODO: modify the QueryParserHelper so that we can extend it (it is not + * flexible in getting the parser, otherwise we could use the test methods there + * for most part) + * + * Tests QueryParser. + */ +public class TestAqpSLGStandardTest extends AqpTestAbstractCase { + + public static Analyzer qpAnalyzer = new QPTestAnalyzer(); + + public static final class QPTestFilter extends TokenFilter { + private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); + + /** + * Filter which discards the token 'stop' and which expands the token + * 'phrase' into 'phrase1 phrase2' + */ + public QPTestFilter(TokenStream in) { + super(in); + } + + boolean inPhrase = false; + int savedStart = 0, savedEnd = 0; + + @Override + public boolean incrementToken() throws IOException { + if (inPhrase) { + inPhrase = false; + clearAttributes(); + termAtt.setEmpty().append("phrase2"); + offsetAtt.setOffset(savedStart, savedEnd); + return true; + } else + while (input.incrementToken()) { + if (termAtt.toString().equals("phrase")) { + inPhrase = true; + savedStart = offsetAtt.startOffset(); + savedEnd = offsetAtt.endOffset(); + termAtt.setEmpty().append("phrase1"); + offsetAtt.setOffset(savedStart, savedEnd); + return true; + } else if (!termAtt.toString().equals("stop")) + return true; + } + return false; + } + } + + public static final class QPTestAnalyzer extends Analyzer { + + /** Filters MockTokenizer with StopFilter. 
*/ + @Override + public final TokenStreamComponents createComponents(String fieldName, + Reader reader) { + Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, + true); + return new TokenStreamComponents(tokenizer, new QPTestFilter(tokenizer)); + } + } + + public static class QPTestParser extends AqpQueryParser { + public QPTestParser(QueryConfigHandler config, AqpSyntaxParser parser, + QueryNodeProcessorPipeline processor, QueryTreeBuilder builder) { + super(config, parser, processor, builder); + // TODO Auto-generated constructor stub + } + + public static AqpQueryParser init(Analyzer a) throws Exception { + AqpQueryParser p = AqpStandardLuceneParser.init(); + + ((QueryNodeProcessorPipeline) p.getQueryNodeProcessor()) + .add(new QPTestParserQueryNodeProcessor()); + p.setAnalyzer(a); + return p; + } + + private static class QPTestParserQueryNodeProcessor extends + QueryNodeProcessorImpl { + + @Override + protected QueryNode postProcessNode(QueryNode node) + throws QueryNodeException { + + return node; + + } + + @Override + protected QueryNode preProcessNode(QueryNode node) + throws QueryNodeException { + + if (node instanceof WildcardQueryNode || node instanceof FuzzyQueryNode) { + + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.EMPTY_MESSAGE)); + + } + + return node; + + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + + return children; + + } + + } + + } + + @Override + public void setUp() throws Exception { + super.setUp(); + originalMaxClauses = BooleanQuery.getMaxClauseCount(); + } + + public void testConstantScoreAutoRewrite() throws Exception { + AqpQueryParser qp = getParser(); + qp.setAnalyzer(new WhitespaceAnalyzer(TEST_VERSION_CURRENT)); + + Query q = qp.parse("foo*bar", "field"); + assertTrue(q instanceof WildcardQuery); + assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, + ((MultiTermQuery) q).getRewriteMethod()); + + q = qp.parse("foo*", "field"); + assertTrue(q instanceof PrefixQuery); + assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, + ((MultiTermQuery) q).getRewriteMethod()); + + q = qp.parse("[a TO z]", "field"); + assertTrue(q instanceof TermRangeQuery); + assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, + ((MultiTermQuery) q).getRewriteMethod()); + } + + public void testCJK() throws Exception { + // Test Ideographic Space - As wide as a CJK character cell (fullwidth) + // used google to translate the word "term" to japanese -> ?? 
+ assertQueryEquals("term\u3000term\u3000term", null, + "term\u0020term\u0020term"); + assertQueryEqualsAllowLeadingWildcard("??\u3000??\u3000??", null, + "??\u0020??\u0020??"); + } + + public void testCJKTerm() throws Exception { + // individual CJK chars as terms + StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); + + BooleanQuery expected = new BooleanQuery(); + expected.add(new TermQuery(new Term("field", "中")), + BooleanClause.Occur.SHOULD); + expected.add(new TermQuery(new Term("field", "国")), + BooleanClause.Occur.SHOULD); + + assertEquals(expected, getQuery("中国", analyzer)); + } + + public void testCJKBoostedTerm() throws Exception { + // individual CJK chars as terms + StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); + + BooleanQuery expected = new BooleanQuery(); + expected.setBoost(0.5f); + expected.add(new TermQuery(new Term("field", "中")), + BooleanClause.Occur.SHOULD); + expected.add(new TermQuery(new Term("field", "国")), + BooleanClause.Occur.SHOULD); + + assertEquals(expected, getQuery("中国^0.5", analyzer)); + } + + public void testCJKPhrase() throws Exception { + // individual CJK chars as terms + StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); + + PhraseQuery expected = new PhraseQuery(); + expected.add(new Term("field", "中")); + expected.add(new Term("field", "国")); + + assertEquals(expected, getQuery("\"中国\"", analyzer)); + } + + public void testCJKBoostedPhrase() throws Exception { + // individual CJK chars as terms + StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); + + PhraseQuery expected = new PhraseQuery(); + expected.setBoost(0.5f); + expected.add(new Term("field", "中")); + expected.add(new Term("field", "国")); + + assertEquals(expected, getQuery("\"中国\"^0.5", analyzer)); + } + + public void testCJKSloppyPhrase() throws Exception { + // individual CJK chars as terms + StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); + + PhraseQuery expected = new PhraseQuery(); + expected.setSlop(3); + expected.add(new Term("field", "中")); + expected.add(new Term("field", "国")); + + assertEquals(expected, getQuery("\"中国\"~3", analyzer)); + } + + public void testSimple() throws Exception { + assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2"); + assertQueryEquals("term term term", null, "term term term"); + assertQueryEquals("t�rm term term", new WhitespaceAnalyzer( + TEST_VERSION_CURRENT), "t�rm term term"); + assertQueryEquals("�mlaut", new WhitespaceAnalyzer(TEST_VERSION_CURRENT), + "�mlaut"); + + // XXX: not allowed, TODO??? + // assertQueryEquals("\"\"", new KeywordAnalyzer(), ""); + // assertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:"); + + assertQueryEquals("a AND b", null, "+a +b"); + assertQueryEquals("(a AND b)", null, "+a +b"); + assertQueryEquals("c OR (a AND b)", null, "c (+a +b)"); + + assertQueryEquals("a AND NOT b", null, "+a -b"); + assertQueryEquals("a NOT b", null, "+a -b"); + + assertQueryEquals("a AND -b", null, "+a -b"); + + assertQueryEquals("a AND !b", null, "+a -b"); + + assertQueryEquals("a && b", null, "+a +b"); + + assertQueryEquals("a && ! b", null, "+a -b"); + + assertQueryEquals("a OR b", null, "a b"); + assertQueryEquals("a || b", null, "a b"); + + assertQueryEquals("a OR !b", null, "a -b"); + + assertQueryEquals("a OR ! 
b", null, "a -b"); + + assertQueryEquals("a OR -b", null, "a -b"); + + assertQueryEquals("+term -term term", null, "+term -term term"); + assertQueryEquals("foo:term AND field:anotherTerm", null, + "+foo:term +anotherterm"); + assertQueryEquals("term AND \"phrase phrase\"", null, + "+term +\"phrase phrase\""); + assertQueryEquals("\"hello there\"", null, "\"hello there\""); + assertTrue(getQuery("a AND b", null) instanceof BooleanQuery); + assertTrue(getQuery("hello", null) instanceof TermQuery); + assertTrue(getQuery("\"hello there\"", null) instanceof PhraseQuery); + + assertQueryEquals("germ term^2.0", null, "germ term^2.0"); + assertQueryEquals("(term)^2.0", null, "term^2.0"); + assertQueryEquals("(germ term)^2.0", null, "(germ term)^2.0"); + assertQueryEquals("term^2.0", null, "term^2.0"); + assertQueryEquals("term^2", null, "term^2.0"); + assertQueryEquals("\"germ term\"^2.0", null, "\"germ term\"^2.0"); + assertQueryEquals("\"term germ\"^2", null, "\"term germ\"^2.0"); + + assertQueryEquals("(foo OR bar) AND (baz OR boo)", null, + "+(foo bar) +(baz boo)"); + + assertQueryEquals("((a OR b) AND NOT c) OR d", null, "(+(a b) -c) d"); + assertQueryEquals("((a OR b) NOT c) OR d", null, "(+(a b) -c) d"); + + assertQueryEquals("+(apple \"steve jobs\") -(foo bar baz)", null, + "+(apple \"steve jobs\") -(foo bar baz)"); + assertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null, + "+(title:dog title:cat) -author:\"bob dole\""); + + AqpQueryParser qp = getParser(); + qp.setDefaultOperator(Operator.OR); + assertQueryMatch(qp, "title:(+a -b c)", "text", "+title:a -title:b title:c"); + + qp.setDefaultOperator(Operator.AND); + assertQueryMatch(qp, "title:(+a -b c)", "text", + "+title:a -title:b +title:c"); + + } + + public void testPunct() throws Exception { + Analyzer a = new WhitespaceAnalyzer(TEST_VERSION_CURRENT); + assertQueryEquals("a&b", a, "a&b"); + assertQueryEquals("a&&b", a, "a&&b"); + assertQueryEquals(".NET", a, ".NET"); + } + + public void testSlop() throws Exception { + + assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2"); + assertQueryEquals("\"term germ\"~2 flork", null, "\"term germ\"~2 flork"); + assertQueryEquals("\"term\"~2", null, "term"); + assertQueryEquals("\" \"~2 germ", null, "germ"); + assertQueryEquals("\"term germ\"~2^2", null, "\"term germ\"~2^2.0"); + } + + public void testNumber() throws Exception { + // The numbers go away because SimpleAnalzyer ignores them + assertQueryEquals("3", null, ""); + assertQueryEquals("term 1.0 1 2", null, "term"); + assertQueryEquals("term term1 term2", null, "term term term"); + + Analyzer a = new StandardAnalyzer(TEST_VERSION_CURRENT); + assertQueryEquals("3", a, "3"); + assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2"); + assertQueryEquals("term term1 term2", a, "term term1 term2"); + } + + public void testWildcard() throws Exception { + assertQueryEquals("term*", null, "term*"); + assertQueryEquals("term*^2", null, "term*^2.0"); + assertQueryEquals("term~", null, "term~2"); + assertQueryEquals("term~0.7", null, "term~1"); + + assertQueryEquals("term~^2", null, "term~2^2.0"); + + assertQueryEquals("term^2~", null, "term~2^2.0"); + assertQueryEquals("term*germ", null, "term*germ"); + assertQueryEquals("term*germ^3", null, "term*germ^3.0"); + + assertTrue(getQuery("term*", null) instanceof PrefixQuery); + assertTrue(getQuery("term*^2", null) instanceof PrefixQuery); + assertTrue(getQuery("term~", null) instanceof FuzzyQuery); + assertTrue(getQuery("term~0.7", null) instanceof FuzzyQuery); + + FuzzyQuery fq = 
(FuzzyQuery) getQuery("term~0.7", null); + assertEquals(1, fq.getMaxEdits()); + assertEquals(FuzzyQuery.defaultPrefixLength, fq.getPrefixLength()); + fq = (FuzzyQuery) getQuery("term~", null); + assertEquals(2, fq.getMaxEdits()); + assertEquals(FuzzyQuery.defaultPrefixLength, fq.getPrefixLength()); + + assertTrue(getQuery("term*germ", null) instanceof WildcardQuery); + + /* + * Tests to see that wild card terms are (or are not) properly lower-cased + * with propery parser configuration + */ + // First prefix queries: + // by default, convert to lowercase: + assertWildcardQueryEquals("Term*", true, "term*"); + // explicitly set lowercase: + assertWildcardQueryEquals("term*", true, "term*"); + assertWildcardQueryEquals("Term*", true, "term*"); + assertWildcardQueryEquals("TERM*", true, "term*"); + // explicitly disable lowercase conversion: + assertWildcardQueryEquals("term*", false, "term*"); + assertWildcardQueryEquals("Term*", false, "Term*"); + assertWildcardQueryEquals("TERM*", false, "TERM*"); + // Then 'full' wildcard queries: + // by default, convert to lowercase: + assertWildcardQueryEquals("Te?m", "te?m"); + // explicitly set lowercase: + assertWildcardQueryEquals("te?m", true, "te?m"); + assertWildcardQueryEquals("Te?m", true, "te?m"); + assertWildcardQueryEquals("TE?M", true, "te?m"); + assertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ"); + // explicitly disable lowercase conversion: + assertWildcardQueryEquals("te?m", false, "te?m"); + assertWildcardQueryEquals("Te?m", false, "Te?m"); + assertWildcardQueryEquals("TE?M", false, "TE?M"); + assertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM"); + // Fuzzy queries: + assertWildcardQueryEquals("Term~", "term~2"); + assertWildcardQueryEquals("Term~", true, "term~2"); + assertWildcardQueryEquals("Term~", false, "Term~2"); + // Range queries: + + // TODO: implement this on QueryParser + // Q0002E_INVALID_SYNTAX_CANNOT_PARSE: Syntax Error, cannot parse '[A TO + // C]': Lexical error at line 1, column 1. 
Encountered: "[" (91), after + // : "" + assertWildcardQueryEquals("[A TO C]", "[a TO c]"); + assertWildcardQueryEquals("[A TO C]", true, "[a TO c]"); + assertWildcardQueryEquals("[A TO C]", false, "[A TO C]"); + // Test suffix queries: first disallow + try { + assertWildcardQueryEquals("*Term", true, "*term"); + fail(); + } catch (QueryNodeException pe) { + // expected exception + } + try { + assertWildcardQueryEquals("?Term", true, "?term"); + fail(); + } catch (QueryNodeException pe) { + // expected exception + } + // Test suffix queries: then allow + assertWildcardQueryEquals("*Term", true, "*term", true); + assertWildcardQueryEquals("?Term", true, "?term", true); + } + + public void testLeadingWildcardType() throws Exception { + AqpQueryParser qp = getParser(null); + qp.setAllowLeadingWildcard(true); + assertEquals(WildcardQuery.class, qp.parse("t*erm*", "field").getClass()); + assertEquals(WildcardQuery.class, qp.parse("?term*", "field").getClass()); + assertEquals(WildcardQuery.class, qp.parse("*term*", "field").getClass()); + } + + public void testQPA() throws Exception { + assertQueryEquals("term term^3.0 term", qpAnalyzer, "term term^3.0 term"); + assertQueryEquals("term stop^3.0 term", qpAnalyzer, "term term"); + + assertQueryEquals("term term term", qpAnalyzer, "term term term"); + assertQueryEquals("term +stop term", qpAnalyzer, "term term"); + assertQueryEquals("term -stop term", qpAnalyzer, "term term"); + + assertQueryEquals("drop AND (stop) AND roll", qpAnalyzer, "+drop +roll"); + assertQueryEquals("term +(stop) term", qpAnalyzer, "term term"); + assertQueryEquals("term -(stop) term", qpAnalyzer, "term term"); + + assertQueryEquals("drop AND stop AND roll", qpAnalyzer, "+drop +roll"); + // rca TODO: plug the modifier GroupQueryNodeProcessor + // expected: term phrase1 phrase2 term + assertQueryEquals("term phrase term", qpAnalyzer, + "term (phrase1 phrase2) term"); + + // TODO: plug the modifier GroupQueryNodeProcessor + // expected: term phrase1 phrase2 term + assertQueryEquals("term AND NOT phrase term", qpAnalyzer, + "(+term -(phrase1 phrase2)) term"); + + assertQueryEquals("stop^3", qpAnalyzer, ""); + assertQueryEquals("stop", qpAnalyzer, ""); + assertQueryEquals("(stop)^3", qpAnalyzer, ""); + assertQueryEquals("((stop))^3", qpAnalyzer, ""); + assertQueryEquals("(stop^3)", qpAnalyzer, ""); + assertQueryEquals("((stop)^3)", qpAnalyzer, ""); + assertQueryEquals("(stop)", qpAnalyzer, ""); + assertQueryEquals("((stop))", qpAnalyzer, ""); + assertTrue(getQuery("term term term", qpAnalyzer) instanceof BooleanQuery); + assertTrue(getQuery("term +stop", qpAnalyzer) instanceof TermQuery); + } + + public void testRange() throws Exception { + assertQueryEquals("[ a TO z]", null, "[a TO z]"); + assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, + ((TermRangeQuery) getQuery("[ a TO z]", null)).getRewriteMethod()); + + AqpQueryParser qp = getParser(); + + qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE); + assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, + ((TermRangeQuery) qp.parse("[ a TO z]", "field")).getRewriteMethod()); + + assertQueryEquals("[ a TO z ]", null, "[a TO z]"); + assertQueryEquals("{ a TO z}", null, "{a TO z}"); + assertQueryEquals("{ a TO z }", null, "{a TO z}"); + assertQueryEquals("{ a TO z }^2.0", null, "{a TO z}^2.0"); + assertQueryEquals("[ a TO z] OR bar", null, "[a TO z] bar"); + assertQueryEquals("[ a TO z] AND bar", null, "+[a TO z] +bar"); + assertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a 
TO z}"); + + // the original expected value was: gack (bar blar {a TO z}) + assertQueryEquals("gack ( bar blar { a TO z}) ", null, + "gack bar blar {a TO z}"); + } + + /** + * removed in lucene-4.0 public void testFarsiRangeCollating() throws + * Exception { Directory ramDir = newDirectory(); IndexWriter iw = new + * IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new + * WhitespaceAnalyzer(TEST_VERSION_CURRENT))); Document doc = new Document(); + * doc.add(newField("content", "\u0633\u0627\u0628", Field.Store.YES, + * Field.Index.NOT_ANALYZED)); iw.addDocument(doc); iw.close(); IndexSearcher + * is = new IndexSearcher(ramDir, true); + * + * AqpQueryParser qp = getParser(); qp.setAnalyzer(new + * WhitespaceAnalyzer(TEST_VERSION_CURRENT)); + * + * // Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in // + * RuleBasedCollator. However, the Arabic Locale seems to order the // Farsi + * // characters properly. Collator c = Collator.getInstance(new + * Locale("ar")); qp.setRangeCollator(c); + * + * // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi // + * orders the U+0698 character before the U+0633 character, so the // single + * // index Term below should NOT be returned by a ConstantScoreRangeQuery // + * with a Farsi Collator (or an Arabic one for the case when Farsi is // not + * // supported). + * + * // Test ConstantScoreRangeQuery + * qp.setMultiTermRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE); + * ScoreDoc[] result = is.search(qp.parse("[ \u062F TO \u0698 ]", "content"), + * null, 1000).scoreDocs; + * assertEquals("The index Term should not be included.", 0, result.length); + * + * result = is.search(qp.parse("[ \u0633 TO \u0638 ]", "content"), null, + * 1000).scoreDocs; assertEquals("The index Term should be included.", 1, + * result.length); + * + * // Test RangeQuery + * qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE); + * result = is.search(qp.parse("[ \u062F TO \u0698 ]", "content"), null, + * 1000).scoreDocs; assertEquals("The index Term should not be included.", 0, + * result.length); + * + * result = is.search(qp.parse("[ \u0633 TO \u0638 ]", "content"), null, + * 1000).scoreDocs; assertEquals("The index Term should be included.", 1, + * result.length); + * + * is.close(); ramDir.close(); } + */ + + public void testDateRange() throws Exception { + String startDate = getLocalizedDate(2002, 1, 1); + String endDate = getLocalizedDate(2002, 1, 4); + Calendar endDateExpected = new GregorianCalendar(); + endDateExpected.clear(); + endDateExpected.set(2002, 1, 4, 23, 59, 59); + endDateExpected.set(Calendar.MILLISECOND, 999); + final String defaultField = "default"; + final String monthField = "month"; + final String hourField = "hour"; + AqpQueryParser qp = getParser(); + + Map dateRes = new HashMap(); + + // set a field specific date resolution + dateRes.put(monthField, DateTools.Resolution.MONTH); + qp.setDateResolution(dateRes); + + // set default date resolution to MILLISECOND + qp.setDateResolution(DateTools.Resolution.MILLISECOND); + + // set second field specific date resolution + dateRes.put(hourField, DateTools.Resolution.HOUR); + qp.setDateResolution(dateRes); + + // for this field no field specific date resolution has been set, + // so verify if the default resolution is used + assertDateRangeQueryEquals(qp, defaultField, startDate, endDate, + endDateExpected.getTime(), DateTools.Resolution.MILLISECOND); + + // verify if field specific date resolutions are used for these two + 
// fields + assertDateRangeQueryEquals(qp, monthField, startDate, endDate, + endDateExpected.getTime(), DateTools.Resolution.MONTH); + + assertDateRangeQueryEquals(qp, hourField, startDate, endDate, + endDateExpected.getTime(), DateTools.Resolution.HOUR); + } + + public void testEscaped() throws Exception { + Analyzer a = new WhitespaceAnalyzer(TEST_VERSION_CURRENT); + + /* + * assertQueryEquals("\\[brackets", a, "\\[brackets"); + * assertQueryEquals("\\[brackets", null, "brackets"); + * assertQueryEquals("\\\\", a, "\\\\"); assertQueryEquals("\\+blah", a, + * "\\+blah"); assertQueryEquals("\\(blah", a, "\\(blah"); + * + * assertQueryEquals("\\-blah", a, "\\-blah"); assertQueryEquals("\\!blah", + * a, "\\!blah"); assertQueryEquals("\\{blah", a, "\\{blah"); + * assertQueryEquals("\\}blah", a, "\\}blah"); assertQueryEquals("\\:blah", + * a, "\\:blah"); assertQueryEquals("\\^blah", a, "\\^blah"); + * assertQueryEquals("\\[blah", a, "\\[blah"); assertQueryEquals("\\]blah", + * a, "\\]blah"); assertQueryEquals("\\\"blah", a, "\\\"blah"); + * assertQueryEquals("\\(blah", a, "\\(blah"); assertQueryEquals("\\)blah", + * a, "\\)blah"); assertQueryEquals("\\~blah", a, "\\~blah"); + * assertQueryEquals("\\*blah", a, "\\*blah"); assertQueryEquals("\\?blah", + * a, "\\?blah"); //assertQueryEquals("foo \\&\\& bar", a, + * "foo \\&\\& bar"); //assertQueryEquals("foo \\|| bar", a, + * "foo \\|| bar"); //assertQueryEquals("foo \\AND bar", a, + * "foo \\AND bar"); + */ + + assertQueryEquals("\\*", a, "*"); + + assertQueryEquals("\\a", a, "a"); + + assertQueryEquals("a\\-b:c", a, "a-b:c"); + assertQueryEquals("a\\+b:c", a, "a+b:c"); + assertQueryEquals("a\\:b:c", a, "a:b:c"); + assertQueryEquals("a\\\\b:c", a, "a\\b:c"); + + assertQueryEquals("a:b\\-c", a, "a:b-c"); + assertQueryEquals("a:b\\+c", a, "a:b+c"); + assertQueryEquals("a:b\\:c", a, "a:b:c"); + assertQueryEquals("a:b\\\\c", a, "a:b\\c"); + + assertQueryEquals("a:b\\-c*", a, "a:b-c*"); + assertQueryEquals("a:b\\+c*", a, "a:b+c*"); + assertQueryEquals("a:b\\:c*", a, "a:b:c*"); + + assertQueryEquals("a:b\\\\c*", a, "a:b\\c*"); + + assertQueryEquals("a:b\\-?c", a, "a:b-?c"); + assertQueryEquals("a:b\\+?c", a, "a:b+?c"); + assertQueryEquals("a:b\\:?c", a, "a:b:?c"); + + assertQueryEquals("a:b\\\\?c", a, "a:b\\?c"); + + assertQueryEquals("a:b\\-c~", a, "a:b-c~1"); + assertQueryEquals("a:b\\+c~", a, "a:b+c~1"); + assertQueryEquals("a:b\\:c~", a, "a:b:c~1"); + assertQueryEquals("a:b\\\\c~", a, "a:b\\c~1"); + + // TODO: implement Range queries on QueryParser + assertQueryEquals("[ a\\- TO a\\+ ]", null, "[a- TO a+]"); + assertQueryEquals("[ a\\: TO a\\~ ]", null, "[a: TO a~]"); + assertQueryEquals("[ a\\\\ TO a\\* ]", null, "[a\\ TO a*]"); + + assertQueryEquals( + "[\"c\\:\\\\temp\\\\\\~foo0.txt\" TO \"c\\:\\\\temp\\\\\\~foo9.txt\"]", + a, "[c:\\temp\\~foo0.txt TO c:\\temp\\~foo9.txt]"); + + assertQueryEquals("a\\\\\\+b", a, "a\\+b"); + + assertQueryEquals("a \\\"b c\\\" d", a, "a \"b c\" d"); + assertQueryEquals("\"a \\\"b c\\\" d\"", a, "\"a \"b c\" d\""); + assertQueryEquals("\"a \\+b c d\"", a, "\"a +b c d\""); + + assertQueryEquals("c\\:\\\\temp\\\\\\~foo.txt", a, "c:\\temp\\~foo.txt"); + + assertQueryNodeException("XY\\"); // there must be a character after the + // escape char + + // test unicode escaping + assertQueryEquals("a\\u0062c", a, "abc"); + assertQueryEquals("XY\\u005a", a, "XYZ"); + assertQueryEquals("XY\\u005A", a, "XYZ"); + assertQueryEquals("\"a \\\\\\u0028\\u0062\\\" c\"", a, "\"a \\(b\" c\""); + + 
assertQueryNodeException("XY\\u005G"); // test non-hex character in escaped + // unicode sequence + assertQueryNodeException("XY\\u005"); // test incomplete escaped unicode + // sequence + + // Tests bug LUCENE-800 + assertQueryEquals("(item:\\\\ item:ABCD\\\\)", a, "item:\\ item:ABCD\\"); + assertQueryNodeException("(item:\\\\ item:ABCD\\\\))"); // unmatched closing + assertQueryNodeException("(item:\\\\ item:ABCD\\\\)))"); // unmatched closing + assertQueryNodeException("(item:\\\\ item:ABCD\\\\) foo)"); // unmatched closing + + // paranthesis + assertQueryEquals("\\*", a, "*"); + assertQueryEquals("\\\\", a, "\\"); // escaped backslash + + assertQueryNodeException("\\"); // a backslash must always be escaped + + // LUCENE-1189 + // the original result was set to be: + // assertQueryEquals("(\"a\\\\\") or (\"b\")", a, "a\\ or b"); + // however, this is wrong, because the query is: ("a\\") or ("b") + // the AQP parser is handling escaped chars (even escaped escapes) + // correctly + assertQueryEquals("(\"a\\\\\") or (\"b\")", a, "a\\ b"); + assertQueryEquals("(foo:\"a\\\\\") or (bar:\"b\")", a, "foo:a\\ bar:b"); + } + + public void testQueryStringEscaping() throws Exception { + Analyzer a = new WhitespaceAnalyzer(TEST_VERSION_CURRENT); + + assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c"); + assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c"); + assertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c"); + assertEscapedQueryEquals("a\\b:c", a, "a\\\\b\\:c"); + + assertEscapedQueryEquals("a:b-c", a, "a\\:b\\-c"); + assertEscapedQueryEquals("a:b+c", a, "a\\:b\\+c"); + assertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c"); + assertEscapedQueryEquals("a:b\\c", a, "a\\:b\\\\c"); + + assertEscapedQueryEquals("a:b-c*", a, "a\\:b\\-c\\*"); + assertEscapedQueryEquals("a:b+c*", a, "a\\:b\\+c\\*"); + assertEscapedQueryEquals("a:b:c*", a, "a\\:b\\:c\\*"); + + assertEscapedQueryEquals("a:b\\\\c*", a, "a\\:b\\\\\\\\c\\*"); + + assertEscapedQueryEquals("a:b-?c", a, "a\\:b\\-\\?c"); + assertEscapedQueryEquals("a:b+?c", a, "a\\:b\\+\\?c"); + assertEscapedQueryEquals("a:b:?c", a, "a\\:b\\:\\?c"); + + assertEscapedQueryEquals("a:b?c", a, "a\\:b\\?c"); + + assertEscapedQueryEquals("a:b-c~", a, "a\\:b\\-c\\~"); + assertEscapedQueryEquals("a:b+c~", a, "a\\:b\\+c\\~"); + assertEscapedQueryEquals("a:b:c~", a, "a\\:b\\:c\\~"); + assertEscapedQueryEquals("a:b\\c~", a, "a\\:b\\\\c\\~"); + + assertEscapedQueryEquals("[ a - TO a+ ]", null, "\\[ a \\- TO a\\+ \\]"); + assertEscapedQueryEquals("[ a : TO a~ ]", null, "\\[ a \\: TO a\\~ \\]"); + assertEscapedQueryEquals("[ a\\ TO a* ]", null, "\\[ a\\\\ TO a\\* \\]"); + + // LUCENE-881 + assertEscapedQueryEquals("|| abc ||", a, "\\|\\| abc \\|\\|"); + assertEscapedQueryEquals("&& abc &&", a, "\\&\\& abc \\&\\&"); + } + + public void testTabNewlineCarriageReturn() throws Exception { + assertQueryEqualsDOA("+weltbank +worlbank", null, "+weltbank +worlbank"); + + assertQueryEqualsDOA("+weltbank\n+worlbank", null, "+weltbank +worlbank"); + assertQueryEqualsDOA("weltbank \n+worlbank", null, "+weltbank +worlbank"); + assertQueryEqualsDOA("weltbank \n +worlbank", null, "+weltbank +worlbank"); + + assertQueryEqualsDOA("+weltbank\r+worlbank", null, "+weltbank +worlbank"); + assertQueryEqualsDOA("weltbank \r+worlbank", null, "+weltbank +worlbank"); + assertQueryEqualsDOA("weltbank \r +worlbank", null, "+weltbank +worlbank"); + + assertQueryEqualsDOA("+weltbank\r\n+worlbank", null, "+weltbank +worlbank"); + assertQueryEqualsDOA("weltbank \r\n+worlbank", null, "+weltbank +worlbank"); + 
assertQueryEqualsDOA("weltbank \r\n +worlbank", null, "+weltbank +worlbank"); + assertQueryEqualsDOA("weltbank \r \n +worlbank", null, + "+weltbank +worlbank"); + + assertQueryEqualsDOA("+weltbank\t+worlbank", null, "+weltbank +worlbank"); + assertQueryEqualsDOA("weltbank \t+worlbank", null, "+weltbank +worlbank"); + assertQueryEqualsDOA("weltbank \t +worlbank", null, "+weltbank +worlbank"); + } + + public void testSimpleDAO() throws Exception { + assertQueryEqualsDOA("term term term", null, "+term +term +term"); + assertQueryEqualsDOA("term +term term", null, "+term +term +term"); + assertQueryEqualsDOA("term term +term", null, "+term +term +term"); + assertQueryEqualsDOA("term +term +term", null, "+term +term +term"); + assertQueryEqualsDOA("-term term term", null, "-term +term +term"); + } + + public void testBoost() throws Exception { + CharacterRunAutomaton stopSet = new CharacterRunAutomaton( + BasicAutomata.makeString("on")); + Analyzer oneStopAnalyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, + true, stopSet); + AqpQueryParser qp = getParser(); + qp.setAnalyzer(oneStopAnalyzer); + + Query q = qp.parse("on^1.0", "field"); + assertNotNull(q); + q = qp.parse("\"hello\"^2.0", "field"); + assertNotNull(q); + assertEquals(q.getBoost(), (float) 2.0, (float) 0.5); + q = qp.parse("hello^2.0", "field"); + assertNotNull(q); + assertEquals(q.getBoost(), (float) 2.0, (float) 0.5); + q = qp.parse("\"on\"^1.0", "field"); + assertNotNull(q); + + AqpQueryParser qp2 = getParser(); + qp2.setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)); + + q = qp2.parse("the^3", "field"); + // "the" is a stop word so the result is an empty query: + assertNotNull(q); + assertEquals("", q.toString()); + assertEquals(1.0f, q.getBoost(), 0.01f); + } + + public void testException() throws Exception { + assertQueryNodeException("*leadingWildcard"); // disallowed by default + assertQueryNodeException("(foo bar"); + + assertQueryNodeException("\"some phrase"); + assertQueryNodeException("foo bar))"); + assertQueryNodeException("field:term:with:colon some more terms"); + assertQueryNodeException("(sub query)^5.0^2.0 plus more"); + assertQueryNodeException("secret AND illegal) AND access:confidential"); + } + + public void testCustomQueryParserWildcard() throws Exception { + try { + QPTestParser.init(new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse( + "a?t", "contents"); + fail("Wildcard queries should not be allowed"); + } catch (QueryNodeException expected) { + // expected exception + } + } + + public void testCustomQueryParserFuzzy() throws Exception { + try { + QPTestParser.init(new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse( + "xunit~", "contents"); + fail("Fuzzy queries should not be allowed"); + } catch (QueryNodeException expected) { + // expected exception + } + } + + public void testBooleanQuery() throws Exception { + BooleanQuery.setMaxClauseCount(2); + try { + AqpQueryParser qp = getParser(); + qp.setAnalyzer(new WhitespaceAnalyzer(TEST_VERSION_CURRENT)); + + qp.parse("one two three", "field"); + fail("ParseException expected due to too many boolean clauses"); + } catch (QueryNodeException expected) { + // too many boolean clauses, so ParseException is expected + } + } + + /** + * This test differs from TestPrecedenceQueryParser + */ + public void testPrecedence() throws Exception { + AqpQueryParser qp1 = getParser(); + qp1.setAnalyzer(new WhitespaceAnalyzer(TEST_VERSION_CURRENT)); + + AqpQueryParser qp2 = getParser(); + qp2.setAnalyzer(new WhitespaceAnalyzer(TEST_VERSION_CURRENT)); + + // 
TODO: to achieve standard lucene behaviour (no operator precedence) + // modify the GroupQueryNodeProcessor to recognize our new BooleanQN classes + // then do: + QueryNodeProcessorPipeline processor = (QueryNodeProcessorPipeline) qp1 + .getQueryNodeProcessor(); + processor.add(new GroupQueryNodeProcessor()); + + Query query1 = qp1.parse("A AND B OR C AND D", "field"); + Query query2 = qp2.parse("+A +B +C +D", "field"); + + assertEquals(query1, query2); + } + + public void testLocalDateFormat() throws IOException, QueryNodeException, + ParseException { + Directory ramDir = new RAMDirectory(); + IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig( + TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))); + addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw); + addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw); + iw.close(); + IndexSearcher is = new IndexSearcher(DirectoryReader.open(ramDir)); + + SimpleDateFormat format = new SimpleDateFormat("dd/MM/yyyy", Locale.ROOT); + Date d1_12 = format.parse("1/12/2005"); + Date d3_12 = format.parse("3/12/2005"); + Date d4_12 = format.parse("4/12/2005"); + Date d28_12 = format.parse("28/12/2005"); + + DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, + Locale.getDefault()); + String dec1 = df.format(d1_12); + String dec2 = df.format(format.parse("2/12/2005")); + String dec3 = df.format(d3_12); + String dec4 = df.format(d4_12); + String dec28 = df.format(d28_12); + + assertHits(2, String.format("[%s TO %s]", dec1, dec28), is); + assertHits(2, String.format("[%s TO %s]", dec1, dec4), is); + + assertHits(2, String.format("{%s TO %s}", dec1, dec28), is); + assertHits(1, String.format("{%s TO %s}", dec1, dec4), is); + assertHits(0, String.format("{%s TO %s}", dec3, dec4), is); + + ramDir.close(); + } + + public void testStopwords() throws Exception { + AqpQueryParser qp = getParser(); + qp.setAnalyzer(new StopAnalyzer(TEST_VERSION_CURRENT, StopFilter + .makeStopSet(TEST_VERSION_CURRENT, "the", "foo"))); + + Query result = qp.parse("a:the OR a:foo", "a"); + assertNotNull("result is null and it shouldn't be", result); + assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery); + assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + + 0, ((BooleanQuery) result).clauses().size() == 0); + result = qp.parse("a:woo OR a:the", "a"); + assertNotNull("result is null and it shouldn't be", result); + assertTrue("result is not a TermQuery", result instanceof TermQuery); + + result = qp.parse( + "(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", + "a"); + + assertNotNull("result is null and it shouldn't be", result); + assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery); + if (VERBOSE) + System.out.println("Result: " + result); + assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + + 2, ((BooleanQuery) result).clauses().size() == 2); + } + + public void testPositionIncrement() throws Exception { + AqpQueryParser qp = getParser(); + qp.setAnalyzer(new StopAnalyzer(TEST_VERSION_CURRENT, StopFilter + .makeStopSet(TEST_VERSION_CURRENT, "the", "in", "are", "this"))); + + qp.setEnablePositionIncrements(true); + + String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\""; + // 0 2 5 7 8 + int expectedPositions[] = { 1, 3, 4, 6, 9 }; + PhraseQuery pq = (PhraseQuery) qp.parse(qtxt, "a"); + // System.out.println("Query text: "+qtxt); + // System.out.println("Result: "+pq); + Term t[] = pq.getTerms(); + int pos[] = 
pq.getPositions(); + for (int i = 0; i < t.length; i++) { + // System.out.println(i+". "+t[i]+" pos: "+pos[i]); + assertEquals("term " + i + " = " + t[i] + " has wrong term-position!", + expectedPositions[i], pos[i]); + } + } + + public void testMatchAllDocs() throws Exception { + AqpQueryParser qp = getParser(); + qp.setAnalyzer(new WhitespaceAnalyzer(TEST_VERSION_CURRENT)); + + assertEquals(new MatchAllDocsQuery(), qp.parse("*:*", "field")); + assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)", "field")); + BooleanQuery bq = (BooleanQuery) qp.parse("+*:* -*:*", "field"); + assertTrue(bq.getClauses()[0].getQuery() instanceof MatchAllDocsQuery); + assertTrue(bq.getClauses()[1].getQuery() instanceof MatchAllDocsQuery); + } + + private class CannedTokenizer extends Tokenizer { + private int upto = 0; + private final PositionIncrementAttribute posIncr = addAttribute(PositionIncrementAttribute.class); + private final CharTermAttribute term = addAttribute(CharTermAttribute.class); + + public CannedTokenizer(Reader reader) { + super(reader); + } + + @Override + public boolean incrementToken() { + clearAttributes(); + if (upto == 4) { + return false; + } + if (upto == 0) { + posIncr.setPositionIncrement(1); + term.setEmpty().append("a"); + } else if (upto == 1) { + posIncr.setPositionIncrement(1); + term.setEmpty().append("b"); + } else if (upto == 2) { + posIncr.setPositionIncrement(0); + term.setEmpty().append("c"); + } else { + posIncr.setPositionIncrement(0); + term.setEmpty().append("d"); + } + upto++; + return true; + } + + @Override + public void reset() throws IOException { + super.reset(); + this.upto = 0; + } + } + + private class CannedAnalyzer extends Analyzer { + @Override + public TokenStreamComponents createComponents(String ignored, + Reader alsoIgnored) { + return new TokenStreamComponents(new CannedTokenizer(alsoIgnored)); + } + } + + public void testMultiPhraseQuery() throws Exception { + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( + TEST_VERSION_CURRENT, new CannedAnalyzer())); + Document doc = new Document(); + doc.add(newField("field", "", TextField.TYPE_NOT_STORED)); + w.addDocument(doc); + w.commit(); + IndexReader r = DirectoryReader.open(w.getDirectory()); + IndexSearcher s = newSearcher(r); + + Query q = QPTestParser.init(new CannedAnalyzer()).parse("\"a\"", "field"); + assertTrue(q instanceof MultiPhraseQuery); + assertEquals(1, s.search(q, 10).totalHits); + r.close(); + w.close(); + dir.close(); + } + + // Uniquely for Junit 3 + public static junit.framework.Test suite() { + return new junit.framework.JUnit4TestAdapter(TestAqpSLGStandardTest.class); + } + +} Index: lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGMultiAnalyzer.java =================================================================== --- lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGMultiAnalyzer.java (revision 0) +++ lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGMultiAnalyzer.java (revision 0) @@ -0,0 +1,268 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.io.IOException; +import java.io.Reader; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.Operator; +import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardLuceneParser; +import org.apache.lucene.search.Query; + +/** + * This test case is a copy of the core Lucene query parser test, it was adapted + * to use new QueryParserHelper instead of the old query parser. + * + * Test QueryParser's ability to deal with Analyzers that return more than one + * token per position or that return tokens with a position increment > 1. + */ +public class TestAqpSLGMultiAnalyzer extends AqpTestAbstractCase { + + private static int multiToken = 0; + + public void testMultiAnalyzer() throws QueryNodeException, Exception { + + AqpQueryParser qp = getParser(); + qp.setDefaultOperator(Operator.OR); + qp.setAnalyzer(new MultiAnalyzer()); + + // trivial, no multiple tokens: + assertEquals("foo", qp.parse("foo", "").toString()); + assertEquals("foo", qp.parse("\"foo\"", "").toString()); + assertEquals("foo foobar", qp.parse("foo foobar", "").toString()); + assertEquals("\"foo foobar\"", qp.parse("\"foo foobar\"", "").toString()); + assertEquals("\"foo foobar blah\"", qp.parse("\"foo foobar blah\"", "") + .toString()); + + // two tokens at the same position: + assertEquals("(multi multi2) foo", qp.parse("multi foo", "").toString()); + assertEquals("foo (multi multi2)", qp.parse("foo multi", "").toString()); + assertEquals("(multi multi2) (multi multi2)", qp.parse("multi multi", "") + .toString()); + Query q = qp.parse("(foo multi) +(bar multi)", ""); + assertEquals("foo (multi multi2) +(bar (multi multi2))", + qp.parse("(foo multi) +(bar multi)", "").toString()); + assertEquals("+(foo (multi multi2)) field:\"bar (multi multi2)\"", qp + .parse("+(foo multi) field:\"bar multi\"", "").toString()); + + // phrases: + assertEquals("\"(multi multi2) foo\"", qp.parse("\"multi foo\"", "") + .toString()); + assertEquals("\"foo (multi multi2)\"", qp.parse("\"foo multi\"", "") + .toString()); + assertEquals("\"foo (multi multi2) foobar (multi multi2)\"", + qp.parse("\"foo multi foobar multi\"", "").toString()); + + // fields: + assertEquals("(field:multi field:multi2) field:foo", + qp.parse("field:multi field:foo", "").toString()); + assertEquals("field:\"(multi multi2) foo\"", + qp.parse("field:\"multi foo\"", "").toString()); + + // three tokens at one position: + assertEquals("triplemulti 
multi3 multi2", qp.parse("triplemulti", "") + .toString()); + assertEquals("foo (triplemulti multi3 multi2) foobar", + qp.parse("foo triplemulti foobar", "").toString()); + + // phrase with non-default slop: + assertEquals("\"(multi multi2) foo\"~10", qp.parse("\"multi foo\"~10", "") + .toString()); + + // phrase with non-default boost: + assertEquals("\"(multi multi2) foo\"^2.0", qp.parse("\"multi foo\"^2", "") + .toString()); + + // phrase after changing default slop + qp.setDefaultPhraseSlop(99); + assertEquals("\"(multi multi2) foo\"~99 bar", + qp.parse("\"multi foo\" bar", "").toString()); + assertEquals("\"(multi multi2) foo\"~99 \"foo bar\"~2", + qp.parse("\"multi foo\" \"foo bar\"~2", "").toString()); + qp.setDefaultPhraseSlop(0); + + // non-default operator: + qp.setDefaultOperator(Operator.AND); + assertEquals("+(multi multi2) +foo", qp.parse("multi foo", "").toString()); + + } + + // public void testMultiAnalyzerWithSubclassOfQueryParser() throws + // ParseException { + // this test doesn't make sense when using the new QueryParser API + // DumbQueryParser qp = new DumbQueryParser("", new MultiAnalyzer()); + // qp.setPhraseSlop(99); // modified default slop + // + // // direct call to (super's) getFieldQuery to demonstrate differnce + // // between phrase and multiphrase with modified default slop + // assertEquals("\"foo bar\"~99", + // qp.getSuperFieldQuery("","foo bar").toString()); + // assertEquals("\"(multi multi2) bar\"~99", + // qp.getSuperFieldQuery("","multi bar").toString()); + // + // + // // ask sublcass to parse phrase with modified default slop + // assertEquals("\"(multi multi2) foo\"~99 bar", + // qp.parse("\"multi foo\" bar").toString()); + // + // } + + public void testPosIncrementAnalyzer() throws QueryNodeException, Exception { + AqpQueryParser qp = getParser(); + qp.setDefaultOperator(Operator.OR); + + qp.setAnalyzer(new PosIncrementAnalyzer()); + + assertEquals("quick brown", qp.parse("the quick brown", "").toString()); + assertEquals("\"quick brown\"", qp.parse("\"the quick brown\"", "") + .toString()); + assertEquals("quick brown fox", qp.parse("the quick brown fox", "") + .toString()); + assertEquals("\"quick brown fox\"", qp.parse("\"the quick brown fox\"", "") + .toString()); + } + + /** + * Expands "multi" to "multi" and "multi2", both at the same position, and + * expands "triplemulti" to "triplemulti", "multi3", and "multi2". 
+ */ + private class MultiAnalyzer extends Analyzer { + + @Override + public TokenStreamComponents createComponents(String fieldName, + Reader reader) { + Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, + true); + return new TokenStreamComponents(result, new TestFilter(result)); + } + } + + private final class TestFilter extends TokenFilter { + + private String prevType; + private int prevStartOffset; + private int prevEndOffset; + + private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + private final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class); + private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); + private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class); + + public TestFilter(TokenStream in) { + super(in); + } + + @Override + public final boolean incrementToken() throws java.io.IOException { + if (multiToken > 0) { + termAtt.setEmpty().append("multi" + (multiToken + 1)); + offsetAtt.setOffset(prevStartOffset, prevEndOffset); + typeAtt.setType(prevType); + posIncrAtt.setPositionIncrement(0); + multiToken--; + return true; + } else { + boolean next = input.incrementToken(); + if (!next) { + return false; + } + prevType = typeAtt.type(); + prevStartOffset = offsetAtt.startOffset(); + prevEndOffset = offsetAtt.endOffset(); + String text = termAtt.toString(); + if (text.equals("triplemulti")) { + multiToken = 2; + return true; + } else if (text.equals("multi")) { + multiToken = 1; + return true; + } else { + return true; + } + } + } + + @Override + public void reset() throws IOException { + super.reset(); + this.prevType = null; + this.prevStartOffset = 0; + this.prevEndOffset = 0; + } + } + + /** + * Analyzes "the quick brown" as: quick(incr=2) brown(incr=1). Does not work + * correctly for input other than "the quick brown ...". 
+ */ + private class PosIncrementAnalyzer extends Analyzer { + + @Override + public TokenStreamComponents createComponents(String fieldName, + Reader reader) { + Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, + true); + return new TokenStreamComponents(result, new TestPosIncrementFilter( + result)); + } + } + + private class TestPosIncrementFilter extends TokenFilter { + + private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + private final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class); + + public TestPosIncrementFilter(TokenStream in) { + super(in); + } + + @Override + public final boolean incrementToken() throws java.io.IOException { + while (input.incrementToken()) { + if (termAtt.toString().equals("the")) { + // stopword, do nothing + } else if (termAtt.toString().equals("quick")) { + posIncrAtt.setPositionIncrement(2); + return true; + } else { + posIncrAtt.setPositionIncrement(1); + return true; + } + } + return false; + } + + } + + // Uniquely for Junit 3 + public static junit.framework.Test suite() { + return new junit.framework.JUnit4TestAdapter(TestAqpSLGMultiAnalyzer.class); + } + +} Index: lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/parser/BuildAST.java =================================================================== --- lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/parser/BuildAST.java (revision 0) +++ lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/parser/BuildAST.java (revision 0) @@ -0,0 +1,109 @@ +package org.apache.lucene.queryparser.flexible.aqp.parser; + +import org.antlr.runtime.*; +import org.antlr.runtime.tree.*; +import org.antlr.stringtemplate.*; +import java.lang.reflect.*; + +import org.apache.lucene.queryparser.flexible.aqp.parser.*; + +/** + * A utility class for generating the dot/graph representations of the query + * + * + */ + +// import +// org.apache.lucene.queryparser.flexible.aqp.parser.StandardLuceneGrammarLexer; +// import +// org.apache.lucene.queryparser.flexible.aqp.parser.StandardLuceneGrammarParser; + +/* + * Arguments: - grammar - query - rule-name (optional, default "mainQ") - action + * (optional, default: dot) + */ +public class BuildAST { + @SuppressWarnings("unchecked") + public static void main(String[] args) throws Exception { + String grammar = args[0]; + String ruleName = "mainQ"; + String action = "dot"; + + if (args.length > 2) { + ruleName = args[2]; + } + + if (args.length > 3) { + if (args[3].toLowerCase().equals("dot")) { + action = "dot"; + } else if (args[3].toLowerCase().equals("tree")) { + action = "tree"; + } else { + throw new Exception("Unknown argument " + args[3] + + ". Allowed actions: dot,tree"); + } + } + + System.err.println("Grammar: " + grammar + " rule:" + ruleName + + "\nquery: " + (args.length > 1 ? args[1] : "--")); + String input = args.length > 1 ? 
args[1] : "No input given"; + + ANTLRStringStream in = new ANTLRStringStream(input); + // Lexer lexer = new StandardLuceneGrammarLexer(in); + // CommonTokenStream ts = new CommonTokenStream(lexer); + + // System.err.println(ts.toString()); + + // CommonTokenStream tokens = new CommonTokenStream(lexer); + + // StandardLuceneGrammarParser parser = new + // StandardLuceneGrammarParser(tokens); + // StandardLuceneGrammarParser.mainQ_return returnValue = parser.mainQ(); + // CommonTree tree = (CommonTree)returnValue.getTree(); + + // get the Classes + Class clsLexer = Class + .forName("org.apache.lucene.queryparser.flexible.aqp.parser." + grammar + + "Lexer"); + Class clsParser = Class + .forName("org.apache.lucene.queryparser.flexible.aqp.parser." + grammar + + "Parser"); + + // instantiate lexer with one parameter + Class partypes[] = new Class[1]; + partypes[0] = CharStream.class; + Constructor ctLexer = clsLexer.getConstructor(partypes); + + Object arglist[] = new Object[1]; + arglist[0] = in; + Object iLexer = ctLexer.newInstance(arglist); + + // get tokens + CommonTokenStream tokens = new CommonTokenStream( + (TokenSource) clsLexer.cast(iLexer)); + + // instantiate parser using parameters + Class partypes2[] = new Class[1]; + partypes2[0] = TokenStream.class; + Constructor ct = clsParser.getConstructor(partypes2); + + Object arglist2[] = new Object[1]; + arglist2[0] = tokens; + Object iParser = ct.newInstance(arglist2); + + // call the mainQ parser rule + Method iParserMainQ = clsParser.getDeclaredMethod(ruleName); + Object retVal = iParserMainQ.invoke(iParser); + Method getMethod = iParserMainQ.getReturnType().getMethod("getTree"); + CommonTree tree = (CommonTree) (getMethod.invoke(retVal)); + + // print the output + if (action.equals("dot")) { + DOTTreeGenerator gen = new DOTTreeGenerator(); + StringTemplate st = gen.toDOT(tree); + System.out.println(st); + } else if (action.equals("tree")) { + System.out.println(tree.toStringTree()); + } + } +} Index: lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpExtendedLGSimple.java =================================================================== --- lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpExtendedLGSimple.java (revision 0) +++ lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpExtendedLGSimple.java (revision 0) @@ -0,0 +1,55 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser; +import org.apache.lucene.queryparser.flexible.aqp.builders.AqpNearQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.aqp.builders.AqpQueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpNearQueryNode; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardLuceneParser; + +/** + * This test case extends {@link TestAqpSLGSimple}; + * we just use a different grammar. + * + * Just an example to show how easy it is to add new + * functionality to the parser. + * + * 1. change grammar + * 2. add new builders/processors + * + * Note: if the ExtendedLuceneGrammar were a separate parser, + * it would be much better to create its own configuration.
See + * {@link AqpStandardLuceneParser#init(String)} for details + * + */ +public class TestAqpExtendedLGSimple extends TestAqpSLGSimple { + + + @Override + public void setUp() throws Exception { + super.setUp(); + setGrammarName("ExtendedLuceneGrammar"); + } + + public void testExtensions() throws Exception { + + AqpQueryParser qp = getParser(); + ((AqpQueryTreeBuilder) qp.getQueryBuilder()).setBuilder(AqpNearQueryNode.class, new AqpNearQueryNodeBuilder()); + + assertQueryMatch(qp, "this NEAR that", "field", + "spanNear([field:this, field:that], 5, true)"); + + assertQueryMatch(qp, "this NEAR3 that", "field", + "spanNear([field:this, field:that], 3, true)"); + + assertQueryMatch(qp, "this NEAR3 (that OR foo*)", "field", + "spanNear([field:this, spanOr([field:that, SpanMultiTermQueryWrapper(field:foo*)])], 3, true)"); + + + } + + // Uniquely for Junit 3 + public static junit.framework.Test suite() { + return new junit.framework.JUnit4TestAdapter(TestAqpExtendedLGSimple.class); + } + +} Index: lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/AqpTestAbstractCase.java =================================================================== --- lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/AqpTestAbstractCase.java (revision 0) +++ lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/AqpTestAbstractCase.java (revision 0) @@ -0,0 +1,430 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import java.io.IOException; +import java.text.DateFormat; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.Iterator; +import java.util.Locale; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.core.SimpleAnalyzer; +import org.apache.lucene.analysis.core.WhitespaceAnalyzer; +import org.apache.lucene.document.DateTools; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.StringField; +import org.apache.lucene.document.TextField; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.QueryParserHelper; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.QueryParserUtil; +import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.Operator; +import org.apache.lucene.queryparser.flexible.standard.processors.StandardQueryNodeProcessorPipeline; +import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardLuceneParser; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.util.LuceneTestCase; + +/** + * This test case is a copy of the core Lucene query parser test, it was adapted + * to use new QueryParserHelper instead of the old query parser. + * + * Tests QueryParser. 
+ */ +public class AqpTestAbstractCase extends LuceneTestCase { + + public int originalMaxClauses; + public boolean debugParser = false; + protected String grammarName = "StandardLuceneGrammar"; + protected int noFailures = 0; + + public void setUp() throws Exception { + super.setUp(); + originalMaxClauses = BooleanQuery.getMaxClauseCount(); + } + + public static void fail(String message) { + System.err.println(message); + LuceneTestCase.fail(message); + } + + public void setDebug(boolean d) { + debugParser = d; + } + + public void setGrammarName(String name) { + grammarName = name; + } + + public String getGrammarName() { + return grammarName; + } + + public AqpQueryParser getParser(Analyzer a) throws Exception { + if (a == null) + a = new SimpleAnalyzer(TEST_VERSION_CURRENT); + AqpQueryParser qp = getParser(); + qp.setAnalyzer(a); + return qp; + + } + + public QueryParserHelper getParser(Analyzer a, boolean standard) + throws Exception { + if (standard) { + StandardQueryParser sp = new StandardQueryParser(a); + if (this.debugParser) { + sp.setQueryNodeProcessor(new DebuggingQueryNodeProcessorPipeline(sp + .getQueryConfigHandler())); + } + return sp; + } else { + return getParser(a); + } + } + + public AqpQueryParser getParser() throws Exception { + AqpQueryParser qp = AqpStandardLuceneParser.init(getGrammarName()); + qp.setDebug(this.debugParser); + return qp; + } + + public Query getQuery(String query, Analyzer a) throws Exception { + return getParser(a).parse(query, "field"); + } + + public Query getQueryAllowLeadingWildcard(String query, Analyzer a) + throws Exception { + AqpQueryParser parser = getParser(a); + parser.setAllowLeadingWildcard(true); + return parser.parse(query, "field"); + } + + public Query assertQueryEquals(String query, Analyzer a, String result) + throws Exception { + Query q = getQuery(query, a); + String s = q.toString("field"); + if (!s.equals(result)) { + debugFail(q.toString(), result, s); + } + return q; + } + + public Query assertQueryEquals(String query, Analyzer a, String result, + Class clazz) throws Exception { + Query q = assertQueryEquals(query, a, result); + if (!q.getClass().isAssignableFrom(clazz)) { + debugFail(q.toString(), result, + "Query is not: " + clazz + " but: " + q.getClass()); + } + return q; + } + + public void assertQueryEqualsAllowLeadingWildcard(String query, Analyzer a, + String result) throws Exception { + Query q = getQueryAllowLeadingWildcard(query, a); + String s = q.toString("field"); + if (!s.equals(result)) { + fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result + + "/"); + } + } + + public void assertQueryEquals(AqpQueryParser qp, String field, String query, + String result) throws Exception { + Query q = qp.parse(query, field); + String s = q.toString(field); + if (!s.equals(result)) { + fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result + + "/"); + } + } + + public void assertEscapedQueryEquals(String query, Analyzer a, String result) + throws Exception { + String escapedQuery = QueryParserUtil.escape(query); + if (!escapedQuery.equals(result)) { + fail("Query /" + query + "/ yielded /" + escapedQuery + "/, expecting /" + + result + "/"); + } + } + + public void assertWildcardQueryEquals(String query, boolean lowercase, + String result, boolean allowLeadingWildcard) throws Exception { + AqpQueryParser qp = getParser(null); + qp.setLowercaseExpandedTerms(lowercase); + qp.setAllowLeadingWildcard(allowLeadingWildcard); + Query q = qp.parse(query, "field"); + String s = 
q.toString("field"); + if (!s.equals(result)) { + fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /" + + result + "/"); + } + } + + public void assertWildcardQueryEquals(String query, boolean lowercase, + String result) throws Exception { + assertWildcardQueryEquals(query, lowercase, result, false); + } + + public void assertWildcardQueryEquals(String query, String result) + throws Exception { + AqpQueryParser qp = getParser(null); + Query q = qp.parse(query, "field"); + String s = q.toString("field"); + if (!s.equals(result)) { + fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /" + + result + "/"); + } + } + + public Query getQueryDOA(String query, Analyzer a) throws Exception { + if (a == null) + a = new SimpleAnalyzer(TEST_VERSION_CURRENT); + AqpQueryParser qp = getParser(); + qp.setAnalyzer(a); + qp.setDefaultOperator(Operator.AND); + + return qp.parse(query, "field"); + + } + + public void assertQueryEqualsDOA(String query, Analyzer a, String result) + throws Exception { + Query q = getQueryDOA(query, a); + String s = q.toString("field"); + if (!s.equals(result)) { + fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result + + "/"); + } + } + + /** for testing DateTools support */ + private String getDate(String s, DateTools.Resolution resolution) + throws Exception { + DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT); + return getDate(df.parse(s), resolution); + } + + /** for testing DateTools support */ + private String getDate(Date d, DateTools.Resolution resolution) { + return DateTools.dateToString(d, resolution); + } + + public String escapeDateString(String s) { + if (s.contains(" ")) { + return "\"" + s + "\""; + } else { + return s; + } + } + + public String getLocalizedDate(int year, int month, int day) { + DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT); + Calendar calendar = new GregorianCalendar(); + calendar.clear(); + calendar.set(year, month, day); + calendar.set(Calendar.HOUR_OF_DAY, 23); + calendar.set(Calendar.MINUTE, 59); + calendar.set(Calendar.SECOND, 59); + calendar.set(Calendar.MILLISECOND, 999); + return df.format(calendar.getTime()); + } + + public void assertDateRangeQueryEquals(AqpQueryParser qp, String field, + String startDate, String endDate, Date endDateInclusive, + DateTools.Resolution resolution) throws Exception { + assertQueryEquals( + qp, + field, + field + ":[" + escapeDateString(startDate) + " TO " + + escapeDateString(endDate) + "]", + "[" + getDate(startDate, resolution) + " TO " + + getDate(endDateInclusive, resolution) + "]"); + assertQueryEquals( + qp, + field, + field + ":{" + escapeDateString(startDate) + " TO " + + escapeDateString(endDate) + "}", + "{" + getDate(startDate, resolution) + " TO " + + getDate(endDate, resolution) + "}"); + } + + public void assertHits(int expected, String query, IndexSearcher is) + throws IOException, QueryNodeException { + AqpQueryParser qp; + try { + qp = getParser(); + } catch (Exception e) { + e.printStackTrace(); + throw new QueryNodeException(e); + } + qp.setAnalyzer(new WhitespaceAnalyzer(TEST_VERSION_CURRENT)); + // qp.setLocale(Locale.ENGLISH); + qp.setDateResolution(DateTools.Resolution.DAY); + + Query q = qp.parse(query, "date"); + ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs; + assertEquals(expected, hits.length); + } + + public void assertQueryNodeException(String queryString) throws Exception { + try { + getQuery(queryString, null); + } catch (QueryNodeException expected) { + return; + } + 
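// if we get here, no QueryNodeException was thrown and the invalid query was accepted +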
debugFail("ParseException expected, not thrown"); + } + + public void addDateDoc(String content, int year, int month, int day, + int hour, int minute, int second, IndexWriter iw) throws IOException { + Document d = new Document(); + d.add(new Field("f", content, TextField.TYPE_STORED)); + Calendar cal = Calendar.getInstance(Locale.ROOT); + cal.set(year, month - 1, day, hour, minute, second); + d.add(new Field("date", getDate(cal.getTime(), DateTools.Resolution.DAY), + StringField.TYPE_NOT_STORED)); + iw.addDocument(d); + } + + public void assertQueryMatch(AqpQueryParser qp, String queryString, + String defaultField, String expectedResult) throws Exception { + + try { + Query query = qp.parse(queryString, defaultField); + String queryParsed = query.toString(); + + if (!queryParsed.equals(expectedResult)) { + + if (this.debugParser) { + + System.out.println("query:\t\t" + queryString); + + if (qp.getDebug() != true) { // it already printed debug + qp.setDebug(true); + qp.parse(queryString, defaultField); + qp.setDebug(false); + } + System.out.println(""); + System.out.println("query:\t\t" + queryString); + System.out.println("result:\t\t" + queryParsed); + + } + + String msg = "Query /" + queryString + "/ with field: " + defaultField + + "/ yielded /" + queryParsed + "/, expecting /" + expectedResult + + "/"; + + debugFail(queryString, expectedResult, queryParsed); + + } else { + if (this.debugParser) { + System.out.println("OK \"" + queryString + "\" ---> " + queryParsed); + } + } + } catch (Exception e) { + if (this.debugParser) { + System.err.println(queryString); + e.printStackTrace(); + } else { + throw e; + } + } + + } + + public void debugFail(String message) { + if (this.debugParser) { + System.err.println("Number of failures: " + ++noFailures); + System.err.println(message); + } else { + fail(message); + } + } + + public void debugFail(String query, String expected, String actual) { + if (this.debugParser) { + System.err.println("Number of failures: " + ++noFailures); + System.err.println("query:/" + query + "/ \nexpected:\n" + expected + + " \nactual:\n" + actual + "/"); + } else { + assertEquals(expected, actual); + } + } + + @Override + public void tearDown() throws Exception { + BooleanQuery.setMaxClauseCount(originalMaxClauses); + super.tearDown(); + } + + class DebuggingQueryNodeProcessorPipeline extends + StandardQueryNodeProcessorPipeline { + DebuggingQueryNodeProcessorPipeline(QueryConfigHandler queryConfig) { + super(queryConfig); + } + + public QueryNode process(QueryNode queryTree) throws QueryNodeException { + String oldVal = null; + String newVal = null; + + oldVal = queryTree.toString(); + int i = 1; + System.out.println(" 0. starting"); + System.out.println("--------------------------------------------"); + System.out.println(oldVal); + + Iterator it = this.iterator(); + + QueryNodeProcessor processor; + while (it.hasNext()) { + processor = it.next(); + + System.out.println(" " + i + ". step " + + processor.getClass().toString()); + queryTree = processor.process(queryTree); + newVal = queryTree.toString(); + System.out.println(" Tree changed: " + + (newVal.equals(oldVal) ? 
"NO" : "YES")); + System.out.println("--------------------------------------------"); + System.out.println(newVal); + oldVal = newVal; + i += 1; + } + + System.out.println(""); + System.out.println("final result:"); + System.out.println("--------------------------------------------"); + System.out.println(queryTree.toString()); + return queryTree; + + } + } + +} Index: lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGSimple.java =================================================================== --- lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGSimple.java (revision 0) +++ lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/aqp/TestAqpSLGSimple.java (revision 0) @@ -0,0 +1,246 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +import org.apache.lucene.analysis.core.WhitespaceAnalyzer; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.builders.QueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.core.nodes.BooleanQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode; +import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser; +import org.apache.lucene.queryparser.flexible.standard.builders.BooleanQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.BoostQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.Operator; +import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; + +/** + * This test case is a copy of the core Lucene query parser test, it was adapted + * to use new QueryParserHelper instead of the old query parser. + * + * Tests QueryParser. 
+ */ +public class TestAqpSLGSimple extends AqpTestAbstractCase { + + private boolean verbose = true; + + private int originalMaxClauses; + + @Override + public void setUp() throws Exception { + super.setUp(); + originalMaxClauses = BooleanQuery.getMaxClauseCount(); + setGrammarName("StandardLuceneGrammar"); + } + + public void testBooleanQuery() throws Exception { + + WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT); + + AqpQueryParser qp = getParser(analyzer); + + StandardQueryParser sp = (StandardQueryParser) getParser(analyzer, true); + + // DEFAULT OPERATOR IS AND + qp.setDefaultOperator(Operator.AND); + sp.setDefaultOperator(Operator.AND); + + // test the clause rewriting/optimization + Query q = qp.parse("a -(-(+(-(x)^0.6))^0.2)^0.3", ""); + assertQueryMatch(qp, "a -(-(+(-(x)^0.6))^0.2)^0.3", "", "+a -x^0.3"); + assertQueryMatch(qp, "-(-(+(-(x)^0.6))^0.2)^0.3", "", "x^0.3"); // not + // minus, + // because + // that is + // not + // allowed + assertQueryMatch(qp, "-(-(+(-(x)^0.6))^0.2)^", "", "x"); // because defualt + // boost is 1.0f + assertQueryMatch(qp, "-(-(+(-(x)^))^0.2)^0.1", "", "x^0.1"); // because + // defualt + // boost is + // 1.0f + + Query qa = sp.parse("kahnn-strauss", "x"); + Query qb = qp.parse("kahnn-strauss", "x"); + assertQueryMatch(qp, "kahnn-strauss", "x", qa.toString()); + + qa = sp.parse("a \\\"b \\\"c d", "x"); + qb = qp.parse("a \\\"b \\\"c d", "x"); + assertQueryMatch(qp, "a \\\"b \\\"c d", "x", qa.toString()); + + qa = sp.parse("\"a \\\"b c\\\" d\"", "x"); + qb = qp.parse("\"a \\\"b c\\\" d\"", "x"); + assertQueryMatch(qp, "\"a \\\"b c\\\" d\"", "x", qa.toString()); + + assertQueryMatch(qp, "+(-(-(-(x)^0.6))^0.2)^", "field", "field:x"); + assertQueryMatch(qp, "+(-(-(-(x)^0.6))^0.2)^0.5", "field", "field:x^0.5"); + + // the first element will be positive, because the negative X NOT Y + // is currently implemented that way (the first must return something) + // TODO: this should be: + // "((+field:a +field:b)^0.8) -((+field:x +field:y)^0.2)" + + assertQueryMatch(qp, "(+(-(a b)))^0.8 OR -(x y)^0.2", "field", + "+((+field:a +field:b)^0.8) -((+field:x +field:y)^0.2)"); + + assertQueryMatch(qp, "(+(-(a b)))^0.8 AND -(x y)^0.2", "field", + "+((+field:a +field:b)^0.8) -((+field:x +field:y)^0.2)"); + + assertQueryMatch(qp, "(+(-(a b)))^0.8 -(x y)", "field", + "+((+field:a +field:b)^0.8) -(+field:x +field:y)"); + // or does -(x y) have different semantics? ... 
-field:x -field:y + // +((-(+field:a +field:b))^0.8) -field:x -field:y + + assertQueryMatch(qp, "+((+(-(a b)))^0.8)^0.7 OR -(x y)^0.2", "field", + "+((+field:a +field:b)^0.7) -((+field:x +field:y)^0.2)"); + + assertQueryMatch(qp, "+title:(dog cat)", "field", "+title:dog +title:cat"); + + assertQueryMatch(qp, "title:(+dog -cat)", "field", "+title:dog -title:cat"); + + assertQueryMatch(qp, "\\*", "field", "field:*"); + + assertQueryMatch(qp, "term~", "field", "field:term~2"); + assertQueryMatch(qp, "term~1", "field", "field:term~1"); + assertQueryMatch(qp, "term~2", "field", "field:term~2"); + + qp.setAllowSlowFuzzy(true); + assertQueryMatch(qp, "term~", "field", "field:term~0.5"); + assertQueryMatch(qp, "term~0.1", "field", "field:term~0.1"); + assertQueryMatch(qp, "term~0.2", "field", "field:term~0.2"); + qp.setAllowSlowFuzzy(false); + + assertQueryMatch(qp, "something", "field", "field:something"); + + assertQueryMatch(qp, "x:something", "field", "x:something"); + + assertQueryMatch(qp, "x:\"something else\"", "field", + "x:\"something else\""); + + assertQueryMatch(qp, "x:\"someth*\"", "field", "x:someth*"); + + assertQueryMatch(qp, "x:\"someth?ng\"", "field", "x:someth?ng"); + + assertQueryMatch(qp, "A AND B C AND D", "field", + "+field:A +field:B +field:C +field:D"); + + assertQueryMatch(qp, "A AND B C AND D OR E", "field", + "+(+field:A +field:B) +((+field:C +field:D) field:E)"); + + assertQueryMatch(qp, "one OR +two", "f", "f:one +f:two"); + + assertQueryMatch(qp, "one OR two NOT three", "field", + "field:one (+field:two -field:three)"); + + assertQueryMatch(qp, "one OR (two AND three) NOT four", "field", + "field:one (+(+field:two +field:three) -field:four)"); + + assertQueryMatch(qp, "-one -two", "field", "-field:one -field:two"); + + assertQueryMatch(qp, "x:one NOT y:two -three^0.5", "field", + "+(+x:one -y:two) -field:three^0.5"); + + qp.setAllowSlowFuzzy(true); + assertQueryMatch(qp, "one NOT two -three~0.2", "field", + "+(+field:one -field:two) -field:three~0.2"); + + assertQueryMatch(qp, "one NOT two NOT three~0.2", "field", + "+field:one -field:two -field:three~0.2"); + + assertQueryMatch(qp, "one two^0.5 three~0.2", "field", + "+field:one +field:two^0.5 +field:three~0.2"); + qp.setAllowSlowFuzzy(false); + + assertQueryMatch(qp, "one NOT two -three~0.2", "field", + "+(+field:one -field:two) -field:three~2"); + + assertQueryMatch(qp, "one NOT two NOT three~0.2", "field", + "+field:one -field:two -field:three~2"); + + assertQueryMatch(qp, "one two^0.5 three~0.2", "field", + "+field:one +field:two^0.5 +field:three~2"); + + q = qp.parse("one (two three)^0.8", "field"); + + // I know where the problem is: builder does not have access + // to the config, so that when a default operator is changed + // *after* the parser was instantiated, the builder behaves + // the old way(s) -- this is a bigger problem, and I didn't + // want to change the flex parser API + + ((QueryTreeBuilder) qp.getQueryBuilder()).setBuilder(BooleanQueryNode.class, new BooleanQueryNodeBuilder(BooleanClause.Occur.MUST)); + + qa = qp.parse("one (two three)^0.8", "field"); + assertQueryMatch(qp, "one (two three)^0.8", "field", + "+field:one +((+field:two +field:three)^0.8)"); + + assertQueryMatch(qp, "one (x:two three)^0.8", "field", + "+field:one +((+x:two +field:three)^0.8)"); + + // TODO: the original value was -((+one:two +one:three)^0.8) + // but that is not a valid Lucene query (or is it?) 
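+ // (note: a BooleanQuery that contains only MUST_NOT clauses matches no documents, + // which is likely why the top-level negation is not preserved here)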
+ assertQueryMatch(qp, "-one:(two three)^0.8", "field", + "(+one:two +one:three)^0.8"); + + + assertQueryMatch(qp, "one:(two three)^0.8", "field", + "(+one:two +one:three)^0.8"); + + assertQueryMatch(qp, "+one:(two three)^0.8", "field", + "(+one:two +one:three)^0.8"); + + assertQueryMatch(qp, "[one TO five]", "field", "field:[one TO five]"); + + assertQueryMatch(qp, "z:[one TO five]", "field", "z:[one TO five]"); + + assertQueryMatch(qp, "{one TO five}", "field", "field:{one TO five}"); + + assertQueryMatch(qp, "z:{one TO five}", "field", "z:{one TO five}"); + + assertQueryMatch(qp, "z:{\"one\" TO \"five\"}", "field", "z:{one TO five}"); + + assertQueryMatch(qp, "z:{one TO *}", "field", "z:{one TO *}"); + + assertQueryMatch(qp, "this +(that)", "field", "+field:this +field:that"); + + assertQueryMatch(qp, "(this) (that)", "field", "+field:this +field:that"); + + assertQueryMatch(qp, "this ((((+(that))))) ", "field", + "+field:this +field:that"); + + assertQueryMatch(qp, "this (+(that)^0.7)", "field", + "+field:this +field:that^0.7"); + + assertQueryMatch(qp, "this (+(that thus)^0.7)", "field", + "+field:this +((+field:that +field:thus)^0.7)"); + + assertQueryMatch(qp, "this (-(+(that thus))^0.7)", "field", + "+field:this -((+field:that +field:thus)^0.7)"); + + assertQueryMatch(qp, "this (+(-(+(-(that thus))^0.1))^0.3)", "field", + "+field:this +((+field:that +field:thus)^0.3)"); + + BooleanQuery.setMaxClauseCount(2); + try { + qp = getParser(new WhitespaceAnalyzer(TEST_VERSION_CURRENT)); + + qp.parse("one two three", "field"); + fail("ParseException expected due to too many boolean clauses"); + } catch (QueryNodeException expected) { + // too many boolean clauses, so ParseException is expected + } + + assertQueryMatch(qp, "*:*", "field", "*:*"); + qp.setAllowLeadingWildcard(true); + assertQueryMatch(qp, "*", "field", "field:*"); + assertQueryMatch(qp, "field:*", "field", "field:*"); + + } + + // Uniquely for Junit 3 + public static junit.framework.Test suite() { + return new junit.framework.JUnit4TestAdapter(TestAqpSLGSimple.class); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/BooleanQueryNodeBuilder.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/BooleanQueryNodeBuilder.java (revision 1484512) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/BooleanQueryNodeBuilder.java (working copy) @@ -40,14 +40,26 @@ *
* It takes in consideration if the children is a {@link ModifierQueryNode} to * define the {@link BooleanClause}. + * + * TODO: this class is an example of why builders should also have access to the + * parser configuration, because we should not decide for the user what the + * default modifier is. I am solving it by passing the parameter, but that is a less + * than optimal solution. + * + */ public class BooleanQueryNodeBuilder implements StandardQueryBuilder { + private BooleanClause.Occur defaultModifier; + + public BooleanQueryNodeBuilder() { - // empty constructor + defaultModifier = BooleanClause.Occur.SHOULD; + } + + public BooleanQueryNodeBuilder(BooleanClause.Occur defaultModifier) { + this.defaultModifier = defaultModifier; } - @Override public BooleanQuery build(QueryNode queryNode) throws QueryNodeException { BooleanQueryNode booleanNode = (BooleanQueryNode) queryNode; @@ -84,7 +96,7 @@ } - private static BooleanClause.Occur getModifierValue(QueryNode node) { + private BooleanClause.Occur getModifierValue(QueryNode node) { if (node instanceof ModifierQueryNode) { ModifierQueryNode mNode = ((ModifierQueryNode) node); @@ -103,7 +115,7 @@ } - return BooleanClause.Occur.SHOULD; + return defaultModifier; } Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpSyntaxParser.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpSyntaxParser.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpSyntaxParser.java (revision 0) @@ -0,0 +1,40 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +import org.antlr.runtime.TokenStream; +import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.parser.SyntaxParser; + +public interface AqpSyntaxParser extends SyntaxParser { + /** + * @param grammarName + * - the name of the grammar + * @throws Exception + * - there are different implementations (should we want different + * interfaces?)
some may be loading grammars on the fly, others will + * load grammars directly + * + * @return AqpSyntaxParser + */ + public AqpSyntaxParser initializeGrammar(String grammarName) + throws QueryNodeParseException; + + /** + * This method should return the stream of tokens, it can be used to modify + * the original query before it gets executed + * + * @param input + * - original query + * @return TokenStream + * - (un)modified stream of tokens + * @throws QueryNodeParseException + */ + public TokenStream getTokenStream(CharSequence input) + throws QueryNodeParseException; + + public QueryNode parseTokenStream(TokenStream tokens, CharSequence query, + CharSequence field) throws QueryNodeParseException; + + public QueryNode parse(CharSequence query, CharSequence field) + throws QueryNodeParseException; +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackImpl.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackImpl.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackImpl.java (revision 0) @@ -0,0 +1,68 @@ +package org.apache.lucene.queryparser.flexible.aqp.config; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.util.ArrayList; +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.util.AttributeImpl; +import org.apache.lucene.queryparser.flexible.aqp.config.AqpFeedbackEventHandler.ACTION; + +public class AqpFeedbackImpl extends AttributeImpl implements AqpFeedback { + + private static final long serialVersionUID = 5178148416076100953L; + + private List events = new ArrayList(); + private List handlers = new ArrayList(); + + @Override + public void clear() { + events.clear(); + } + + @Override + public void copyTo(AttributeImpl target) { + throw new UnsupportedOperationException(); + } + + public void registerEventHandler(AqpFeedbackEventHandler handler) { + handlers.add(handler); + } + + public AqpFeedbackEvent createEvent(TYPE level, + Class qnClass, QueryNode node, String msg, + Object... 
args) { + return new AqpFeedbackEventImpl(level, qnClass, node, msg, args); + } + + public void sendEvent(AqpFeedbackEvent event) { + for (AqpFeedbackEventHandler handler : handlers) { + ACTION r = handler.handle(event); + if (r == ACTION.STOP) { + return; + } else if (r == ACTION.SAVE_EVENT) { + if (!events.contains(event)) { + events.add(event); + } + } + } + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackEvent.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackEvent.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackEvent.java (revision 0) @@ -0,0 +1,19 @@ +package org.apache.lucene.queryparser.flexible.aqp.config; + +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.util.Attribute; + +public interface AqpFeedbackEvent extends Attribute { + + public AqpFeedback.TYPE getType(); + + public Class getCaller(); + + public QueryNode getNode(); + + public String getMessage(); + + public Object[] getArgs(); + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackEventImpl.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackEventImpl.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackEventImpl.java (revision 0) @@ -0,0 +1,46 @@ +package org.apache.lucene.queryparser.flexible.aqp.config; + +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.aqp.config.AqpFeedback.TYPE; + +public class AqpFeedbackEventImpl implements AqpFeedbackEvent { + + private AqpFeedback.TYPE type = null; + private Class caller = null; + private QueryNode node = null; + private String message = null; + private Object[] args = null; + + AqpFeedbackEventImpl(AqpFeedback.TYPE type, + Class processorClass, QueryNode node, + String message, Object... 
args) { + + this.type = type; + this.caller = processorClass; + this.node = node; + this.message = message; + this.args = args; + + } + + public TYPE getType() { + return type; + } + + public Class getCaller() { + return caller; + } + + public QueryNode getNode() { + return node; + } + + public String getMessage() { + return message; + } + + public Object[] getArgs() { + return args; + } +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackEventHandler.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackEventHandler.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedbackEventHandler.java (revision 0) @@ -0,0 +1,21 @@ +package org.apache.lucene.queryparser.flexible.aqp.config; + +import org.apache.lucene.queryparser.flexible.aqp.config.AqpFeedbackEvent; + +public interface AqpFeedbackEventHandler { + + public enum ACTION { + STOP, SAVE_EVENT + }; + + /** + * Handles the {@link AqpFeedbackEvent} + * + * If it returns false, the next registered event handler will not get a + * chance to handle the event. + * + * @param event + * @return + */ + public ACTION handle(AqpFeedbackEvent event); +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedback.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedback.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/config/AqpFeedback.java (revision 0) @@ -0,0 +1,48 @@ +package org.apache.lucene.queryparser.flexible.aqp.config; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.util.Attribute; + +/** + * This attribute is used to collect feedback messages and suggestions from the + * query parser + * + * WARNING: experimental, may change soon! + */ +public interface AqpFeedback extends Attribute { + + public enum TYPE { + DEBUG, INFO, WARN, ERROR, SYNTAX_SUGGESTION, DEPRECATED + }; + + /* + * I am NOT trying to re-implement a wheel, I am just confused what is the + * proper way to wrap SLF4J used by SOLR (but not by Lucene) and not introduce + * it as a dependency to Lucene + */ + public AqpFeedbackEvent createEvent(TYPE level, + Class qnClass, QueryNode node, String msg, + Object... 
args); + + public void sendEvent(AqpFeedbackEvent event); + + public void registerEventHandler(AqpFeedbackEventHandler handler); +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpNearQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpNearQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpNearQueryNode.java (revision 0) @@ -0,0 +1,89 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +import java.util.List; +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpOPERATORProcessor; +import org.apache.lucene.queryparser.flexible.core.QueryNodeError; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNodeImpl; +import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax; + +/** + * For nodes connected through proximity operators, eg. + * + *
+ *   foo NEAR bar
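+ *   foo NEAR3 bar  (explicit proximity; see TestAqpExtendedLGSimple)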
+ *   
+ * + * @see AqpOPERATORProcessor + * + */ +public class AqpNearQueryNode extends QueryNodeImpl implements QueryNode { + + private static final long serialVersionUID = 8806759327487974314L; + private Integer slop = null; + private boolean inOrder = true; + + public AqpNearQueryNode(List children, int proximity) { + if (children == null) { + throw new QueryNodeError( + new MessageImpl(QueryParserMessages.PARAMETER_VALUE_NOT_SUPPORTED, + "children", "null")); + } + allocate(); + setLeaf(false); + add(children); + this.slop = proximity; + } + + public CharSequence toQueryString(EscapeQuerySyntax escapeSyntaxParser) { + if (getChild() == null) + return ""; + + String leftParenthensis = ""; + String rightParenthensis = ""; + + if (getChild() != null && getChild() instanceof AqpNearQueryNode) { + leftParenthensis = "("; + rightParenthensis = ")"; + } + + return leftParenthensis + "#" + + getChild().toQueryString(escapeSyntaxParser) + rightParenthensis; + + } + + public String toString() { + StringBuffer bo = new StringBuffer(); + bo.append("\n"); + for (QueryNode child : this.getChildren()) { + bo.append(child.toString()); + bo.append("\n"); + } + bo.append("\n"); + return bo.toString(); + } + + public QueryNode getChild() { + return getChildren().get(0); + } + + public Integer getSlop() { + return slop; + } + + public void setSlop(Integer prox) { + slop = prox; + } + + public boolean getInOrder() { + return inOrder; + } + + public void setInOrder(boolean order) { + inOrder = order; + } +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpAnalyzedQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpAnalyzedQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpAnalyzedQueryNode.java (revision 0) @@ -0,0 +1,63 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpAnalyzerQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNodeImpl; +import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax; + +/** + * A {@link NonAnalyzedQueryNode} represents a query that was already be + * processed by an analyzer. The child (typically only one) is the result of a + * call to another analyzer. 
+ * + * @see AqpAnalyzerQueryNodeProcessor + */ +public class AqpAnalyzedQueryNode extends QueryNodeImpl { + + /** + * @param node + * - query node + */ + public AqpAnalyzedQueryNode(QueryNode node) { + allocate(); + setLeaf(false); + this.add(node); + } + + @Override + public String toString() { + return "" + this.getChild() + ""; + } + + @Override + public AqpAnalyzedQueryNode cloneTree() throws CloneNotSupportedException { + AqpAnalyzedQueryNode clone = (AqpAnalyzedQueryNode) super.cloneTree(); + // nothing to do here + return clone; + } + + public CharSequence toQueryString(EscapeQuerySyntax escapeSyntaxParser) { + return this.getChildren().get(0).toQueryString(escapeSyntaxParser); + } + + public QueryNode getChild() { + return this.getChildren().get(0); + } +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpAdsabsRegexQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpAdsabsRegexQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpAdsabsRegexQueryNode.java (revision 0) @@ -0,0 +1,41 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +import org.apache.lucene.queryparser.flexible.aqp.builders.AqpFieldQueryNodeRegexBuilder; +import org.apache.lucene.queryparser.flexible.core.builders.QueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.search.RegexpQuery; + +/** + * This node will be turned into the {@link RegexpQuery} by + * {@link AqpFieldQueryNodeRegexBuilder}. But the appropriate + * builder must be configured. + * + * @see instances of {@link QueryTreeBuilder} + * + */ +public class AqpAdsabsRegexQueryNode extends AqpNonAnalyzedQueryNode { + + public AqpAdsabsRegexQueryNode(CharSequence field, CharSequence text, + int begin, int end) { + super(field, text, begin, end); + } + + public AqpAdsabsRegexQueryNode(FieldQueryNode fqn) { + this(fqn.getField(), fqn.getText(), fqn.getBegin(), fqn.getEnd()); + } + + @Override + public String toString() { + return ""; + } + + @Override + public AqpAdsabsRegexQueryNode cloneTree() throws CloneNotSupportedException { + AqpAdsabsRegexQueryNode clone = (AqpAdsabsRegexQueryNode) super.cloneTree(); + + // nothing to do here + + return clone; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpDisjunctionQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpDisjunctionQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpDisjunctionQueryNode.java (revision 0) @@ -0,0 +1,68 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeError; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNodeImpl; +import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax; +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; + +public class AqpDisjunctionQueryNode extends QueryNodeImpl { + + private Float tieBreaker = null; + + public AqpDisjunctionQueryNode(List children, float 
tieBreaker) { + if (children == null) { + throw new QueryNodeError( + new MessageImpl(QueryParserMessages.PARAMETER_VALUE_NOT_SUPPORTED, + "children", "null")); + } + allocate(); + setLeaf(false); + add(children); + this.tieBreaker = tieBreaker; + } + + public CharSequence toQueryString(EscapeQuerySyntax escapeSyntaxParser) { + if (getChildren().size() == 0) + return ""; + + StringBuilder sb = new StringBuilder(); + sb.append("("); + boolean notFirst = false; + for (QueryNode child : getChildren()) { + if (notFirst) { + sb.append(" | "); + } + sb.append(child.toQueryString(escapeSyntaxParser)); + notFirst = true; + } + sb.append(")"); + return sb.toString(); + + } + + public String toString() { + StringBuffer bo = new StringBuffer(); + bo.append("\n"); + for (QueryNode child : this.getChildren()) { + bo.append(child.toString()); + bo.append("\n"); + } + bo.append("\n"); + return bo.toString(); + } + + public Float getTieBreaker() { + return tieBreaker; + } + + public void setTieBreaker(Float tieBreaker) { + this.tieBreaker = tieBreaker; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/SlowFuzzyQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/SlowFuzzyQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/SlowFuzzyQueryNode.java (revision 0) @@ -0,0 +1,18 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +import org.apache.lucene.queryparser.flexible.core.nodes.FuzzyQueryNode; + +/** + * Exactly the same as FuzzyQueryNode but it will be transformed into a + * SlowFuzzyQuery + * + * + */ +public class SlowFuzzyQueryNode extends FuzzyQueryNode { + + public SlowFuzzyQueryNode(CharSequence field, CharSequence term, + float minSimilarity, int begin, int end) { + super(field, term, minSimilarity, begin, end); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpANTLRNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpANTLRNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpANTLRNode.java (revision 0) @@ -0,0 +1,249 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +import java.util.ArrayList; +import java.util.List; + +import org.antlr.runtime.CommonToken; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNodeImpl; +import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpCommonTree; + +/** + * When Aqp parser starts processing the AST (abstract syntax tree) + * every node in the tree is made of {@link AqpANTLRNode} and it wraps + * the {@link AqpCommonTree} through which you can access information + * about the string, its position, type etc... these are courtesy of + * ANTLR. + * + * We provide a few utility methods for setting different attributes + * of the original ANTLR object. 
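+ *
+ * For example (the tree variable stands for an AqpCommonTree produced by
+ * ANTLR and is assumed here):
+ *
+ *   AqpANTLRNode node = new AqpANTLRNode(tree);
+ *   String label = node.getTokenLabel();   // e.g. "AND"
+ *   String name  = node.getTokenName();    // e.g. "OPERATOR"
+ *   String input = node.getTokenInput();   // matched input text, may be null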
+ * + */ +public class AqpANTLRNode extends QueryNodeImpl { + + private static final long serialVersionUID = 5128762709928473351L; + + private AqpCommonTree tree; + + private int tokenType; + + private String tokenLabel; + + private String tokenName; + + private String tokenInput = null; + + /** + * @param node + * - AST node + */ + public AqpANTLRNode(AqpCommonTree node) { + + tree = node; + String input = node.getTokenInput(); + + if (input != null) { + setTokenInput(input); + } + + setTokenLabel(node.getTokenLabel()); + + setTokenType(node.getTokenType()); + + setTokenName(node.getTypeLabel()); + + if (node.getChildCount() > 0) { + setLeaf(false); + allocate(); + } + } + + public CharSequence toQueryString(EscapeQuerySyntax escaper) { + if (getTokenInput() != null) { + return "(" + getTokenLabel() + ":" + getTokenInput() + ")"; + } else { + return getTokenLabel(); + } + } + + public String toStringNodeOnly() { + if (getTokenInput() != null) { + return ""; + } else { + return ""; + } + } + + public String toString() { + return toString(0); + } + + public String toString(int level) { + StringBuffer buf = new StringBuffer(); + buf.append("\n"); + for (int i = 0; i < level; i++) { + buf.append(" "); + } + + buf.append(" children = this.getChildren(); + + if (children != null) { + buf.append(">"); + for (QueryNode child : children) { + if (child instanceof AqpANTLRNode) { + buf.append(((AqpANTLRNode) child).toString(level + 4)); + } else { + buf.append(child.toString()); + } + } + } + + if (isLeaf()) { + buf.append("/>"); + } else { + buf.append("\n"); + for (int i = 0; i < level; i++) { + buf.append(" "); + } + buf.append(""); + } + + return buf.toString(); + } + + public int getTokenType() { + return tokenType; + } + + public void setTokenType(int tokenType) { + this.tokenType = tokenType; + } + + /** + * Label is what is displayed in the AST tree, for example and, And, AND will + * all have label=AND + * + * (But their internal name is an 'OPERATOR') + * + * @return + */ + public String getTokenLabel() { + return tokenLabel; + } + + public void setTokenLabel(String tokenLabel) { + this.tokenLabel = tokenLabel; + } + + public String getTokenName() { + return tokenName; + } + + /** + * Name is the name of the group, ie. 
'AND' is an OPERATOR (but its label + * says: 'AND') + * + * @param tokenName + */ + public void setTokenName(String tokenName) { + this.tokenName = tokenName; + } + + public String getTokenInput() { + return tokenInput; + } + + public void setTokenInput(String tokenInput) { + this.tokenInput = tokenInput; + } + + public int getTokenStart() { + return tree.getStartIndex(); + } + + public int getTokenEnd() { + return tree.getStopIndex(); + } + + public AqpCommonTree getTree() { + return tree; + } + + public int getInputTokenStart() { + return ((CommonToken) tree.getToken()).getCharPositionInLine();// getStartIndex(); + } + + public int getInputTokenEnd() { + return ((CommonToken) tree.getToken()).getStopIndex(); + } + + public void setInputTokenEnd(int stop) { + ((CommonToken) tree.getToken()).setStopIndex(stop); + } + + public void setInputTokenStart(int start) { + ((CommonToken) tree.getToken()).setStartIndex(start); + } + + public AqpANTLRNode getChild(String tokenLabel) { + List children = getChildren(); + if (children != null) { + for (QueryNode child : children) { + AqpANTLRNode n = (AqpANTLRNode) child; + if (n.getTokenLabel().equals(tokenLabel)) { + return n; + } + } + + } + return null; + } + + public AqpANTLRNode findChild(String tokenLabel) { + ArrayList lst = new ArrayList(); + findChild(this, tokenLabel, lst); + + if (lst.size() == 1) { + return (AqpANTLRNode) lst.get(0); + } else if (lst.size() > 1) { + throw new RuntimeException( + "This method is not meant to search for n>1 nodes"); + } + return null; + } + + private void findChild(QueryNode node, String tokenLabel, + ArrayList lst) { + if (((AqpANTLRNode) node).getTokenLabel().equals(tokenLabel)) { + lst.add(node); + } else { + if (!node.isLeaf()) { + for (QueryNode child : node.getChildren()) { + findChild(child, tokenLabel, lst); + } + } + } + } + + public Float getTokenInputFloat() { + if (this.tokenInput != null) { + return Float.valueOf(this.tokenInput); + } + return null; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpDefopQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpDefopQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpDefopQueryNode.java (revision 0) @@ -0,0 +1,67 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode.Modifier; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler; + +/** + * A {@link AqpDefopQueryNode} represents the default boolean operation + * performed on a list of nodes. + * + * This behaves the same way as any AqpBooleanQueryNode but we have the + * advantage of knowing which tokens were marked by the DEFOP operator and later + * on we can look at them and process specially (in the logic that explicit AND + * is stronger than implicit AND) + * + * @see AqpBooleanQueryNode + * @see StandardQueryConfigHandler.Operator + */ +public class AqpDefopQueryNode extends AqpBooleanQueryNode { + + /** + * @param clauses + * - the query nodes to be joined + */ + public AqpDefopQueryNode(List clauses, + StandardQueryConfigHandler.Operator op) { + super(clauses); + + if ((clauses == null) || (clauses.size() == 0)) { + throw new IllegalArgumentException( + "DEFOP query must have at least one clause"); + } + + if (op.equals(StandardQueryConfigHandler.Operator.AND)) { + operator = "AND"; + applyModifier(clauses, Modifier.MOD_REQ); + } else if (op.equals(StandardQueryConfigHandler.Operator.OR)) { + operator = "OR"; + applyModifier(clauses, Modifier.MOD_NONE); + } + + // unfortunately we have to do it like this (when subclassing from + // BooleanQueryNode) + set(clauses); + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpBooleanQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpBooleanQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpBooleanQueryNode.java (revision 0) @@ -0,0 +1,114 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.nodes.BooleanQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.GroupQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode.Modifier; +import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax; + +/** + * A {@link AqpBooleanQueryNode} represents base boolean operation performed on + * a list of nodes. It will apply the @{link ModifierQueryNode} to the clauses. 
+ * The normal behaviour is not to override the ModifierQueryNode values, if + * already present. + */ +public class AqpBooleanQueryNode extends BooleanQueryNode { + + private static final long serialVersionUID = -5974910790857168198L; + + protected String operator = "DEFOP"; + protected boolean overrideModifiers = false; + + /** + * @param clauses + * - the query nodes to be op'ed + */ + public AqpBooleanQueryNode(List clauses) { + super(clauses); + + } + + @Override + public String toString() { + if (getChildren() == null || getChildren().size() == 0) + return ""; + StringBuilder sb = new StringBuilder(); + sb.append(""); + for (QueryNode child : getChildren()) { + sb.append("\n"); + sb.append(child.toString()); + + } + sb.append("\n"); + return sb.toString(); + } + + @Override + public CharSequence toQueryString(EscapeQuerySyntax escapeSyntaxParser) { + if (getChildren() == null || getChildren().size() == 0) + return ""; + + StringBuilder sb = new StringBuilder(); + String filler = ""; + for (QueryNode child : getChildren()) { + sb.append(filler).append(child.toQueryString(escapeSyntaxParser)); + filler = " " + operator + " "; + } + + // in case is root or the parent is a group node avoid parenthesis + if ((getParent() != null && getParent() instanceof GroupQueryNode) + || isRoot()) + return sb.toString(); + else + return "( " + sb.toString() + " )"; + } + + public void applyModifier(List clauses, Modifier mod) { + for (int i = 0; i < clauses.size(); i++) { + QueryNode child = clauses.get(i); + + if (child instanceof ModifierQueryNode || child instanceof GroupQueryNode) { + if (overrideModifiers) { + clauses + .set(i, + new ModifierQueryNode(((ModifierQueryNode) child).getChild(), + mod)); + } + } else { + clauses.set(i, new ModifierQueryNode(child, mod)); + } + } + } + + public void setOverrideModifiers(boolean val) { + this.overrideModifiers = val; + } + + public void setOperator(String op) { + operator = op; + } + + public String getOperator() { + return operator; + } +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpOrQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpOrQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpOrQueryNode.java (revision 0) @@ -0,0 +1,51 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; + +/** + * A {@link AqpOrQueryNode} represents an OR boolean operation performed on a + * list of nodes. 
+ * + * @see AqpBooleanQueryNode + */ +public class AqpOrQueryNode extends AqpBooleanQueryNode { + + private static final long serialVersionUID = 8472252510866053747L; + + /** + * @param clauses + * - the query nodes to be or'ed + */ + public AqpOrQueryNode(List clauses) { + super(clauses); + + operator = "OR"; + + // applyModifier(clauses, Modifier.MOD_NONE); + + // unfortunately we have to do it like this (when subclassing from + // BooleanQueryNode) + set(clauses); + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpNotQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpNotQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpNotQueryNode.java (revision 0) @@ -0,0 +1,72 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode.Modifier; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; + +/** + * A {@link AqpNotQueryNode} represents an NOT boolean operation performed on a + * list of nodes. + * + *
+ * + * The first node is set to be required + * {@link ModifierQueryNode.Modifier#MOD_REQ} and the rest of the clauses will + * have a {@link ModifierQueryNode.Modifier#MOD_NOT} + * + * @see AqpBooleanQueryNode + */ +public class AqpNotQueryNode extends AqpBooleanQueryNode { + + private static final long serialVersionUID = 4054514488434283069L; + + /** + * @param clauses + * - the query nodes to be and'ed + */ + public AqpNotQueryNode(List clauses) { + super(clauses); + + operator = "NOT"; + + if ((clauses == null) || (clauses.size() < 2)) { + throw new IllegalArgumentException( + "NOT query must have at least two clauses"); + } + + QueryNode firstNode = clauses.get(0); + applyModifier(clauses, Modifier.MOD_NOT); + // reset the first node (if it was wrapped, ie not already having user + // specified MODIFIER) + if (!firstNode.equals(clauses.get(0))) { + clauses + .set( + 0, + new ModifierQueryNode(((ModifierQueryNode) clauses.get(0)) + .getChild(), Modifier.MOD_REQ)); + } + + set(clauses); + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpFuzzyModifierNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpFuzzyModifierNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpFuzzyModifierNode.java (revision 0) @@ -0,0 +1,69 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpFuzzyModifierProcessor; +import org.apache.lucene.queryparser.flexible.core.QueryNodeError; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNodeImpl; +import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax; + +/** + * {@link AqpFuzzyModifierNode} is consumed by the {@link AqpFuzzyModifierProcessor} + * which will decide what is the appropriate operation for '~' operator. E.g. + * + *
+ *  
+ *    "foo bar"~5  == slop query node
+ *    "foo"~5      == fuzzy search for 'foo' 
+ *  
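+ *
+ *    A construction sketch (the wrapped child node is assumed; which query is
+ *    finally built depends on AqpFuzzyModifierProcessor):
+ *
+ *      AqpFuzzyModifierNode fuzzy = new AqpFuzzyModifierNode(child, 0.8f);
+ *      Float value = fuzzy.getFuzzyValue();   // 0.8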
+ */ +public class AqpFuzzyModifierNode extends QueryNodeImpl implements QueryNode { + + private static final long serialVersionUID = -3059874057254791689L; + private Float fuzzy; + + public AqpFuzzyModifierNode(QueryNode query, Float fuzzy) { + if (query == null) { + throw new QueryNodeError(new MessageImpl( + QueryParserMessages.PARAMETER_VALUE_NOT_SUPPORTED, "query", "null")); + } + + allocate(); + setLeaf(false); + add(query); + this.fuzzy = fuzzy; + } + + public CharSequence toQueryString(EscapeQuerySyntax escapeSyntaxParser) { + if (getChild() == null) + return ""; + + String leftParenthensis = ""; + String rightParenthensis = ""; + + if (getChild() != null && getChild() instanceof ModifierQueryNode) { + leftParenthensis = "("; + rightParenthensis = ")"; + } + + return leftParenthensis + getChild().toQueryString(escapeSyntaxParser) + + rightParenthensis + "~" + this.fuzzy.toString(); + + } + + public String toString() { + return "" + "\n" + + getChild().toString() + "\n"; + } + + public QueryNode getChild() { + return getChildren().get(0); + } + + public Float getFuzzyValue() { + return fuzzy; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpAndQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpAndQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpAndQueryNode.java (revision 0) @@ -0,0 +1,57 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode.Modifier; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; + +/** + * A {@link AqpAndQueryNode} represents an AND boolean operation performed on a + * list of nodes. 
+ * + * @see AqpBooleanQueryNode + */ +public class AqpAndQueryNode extends AqpBooleanQueryNode { + + private static final long serialVersionUID = -4148186404006404927L; + + /** + * @param clauses + * - the query nodes to be and'ed + */ + public AqpAndQueryNode(List clauses) { + super(clauses); + + operator = "AND"; + + if ((clauses == null) || (clauses.size() == 0)) { + throw new IllegalArgumentException( + "AND query must have at least one clause"); + } + + applyModifier(clauses, Modifier.MOD_REQ); + + // unfortunately we have to do it like this (when subclassing from + // BooleanQueryNode) + set(clauses); + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpNonAnalyzedQueryNode.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpNonAnalyzedQueryNode.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/nodes/AqpNonAnalyzedQueryNode.java (revision 0) @@ -0,0 +1,66 @@ +package org.apache.lucene.queryparser.flexible.aqp.nodes; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QuotedFieldQueryNode; + +/** + * A {@link NonAnalyzedQueryNode} represents a query that will not be processed + * by an analyzer. 
It will be served to the search engine as it is + * + * Example: e(+) + */ +public class AqpNonAnalyzedQueryNode extends QuotedFieldQueryNode { + + private static final long serialVersionUID = 6921391439471630844L; + + /** + * @param field + * - field name + * @param text + * - the query + * @param begin + * - position in the query string + * @param end + * - position in the query string + */ + public AqpNonAnalyzedQueryNode(CharSequence field, CharSequence text, + int begin, int end) { + super(field, text, begin, end); + } + + public AqpNonAnalyzedQueryNode(FieldQueryNode fqn) { + this(fqn.getField(), fqn.getText(), fqn.getBegin(), fqn.getEnd()); + } + + @Override + public String toString() { + return ""; + } + + @Override + public AqpNonAnalyzedQueryNode cloneTree() throws CloneNotSupportedException { + AqpNonAnalyzedQueryNode clone = (AqpNonAnalyzedQueryNode) super.cloneTree(); + + // nothing to do here + + return clone; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpQueryTreeBuilder.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpQueryTreeBuilder.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpQueryTreeBuilder.java (revision 0) @@ -0,0 +1,113 @@ +package org.apache.lucene.queryparser.flexible.aqp.builders; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.builders.QueryBuilder; +import org.apache.lucene.queryparser.flexible.core.builders.QueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.standard.builders.StandardQueryBuilder; +import org.apache.lucene.queryparser.flexible.standard.processors.StandardQueryNodeProcessorPipeline; +import org.apache.lucene.search.Query; + +/** + * This query tree builder only defines the necessary methods for + * debugging.
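+ *
+ * A hypothetical usage sketch (MyTreeBuilder stands for any subclass that
+ * registers its node builders in init(); queryNodeTree is the already
+ * processed QueryNode tree):
+ *
+ *   AqpQueryTreeBuilder builder = new MyTreeBuilder(true);  // true = debug
+ *   Query result = builder.build(queryNodeTree);   // every step is printed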
+ * + * @see QueryTreeBuilder + * @see StandardQueryNodeProcessorPipeline + */ +public class AqpQueryTreeBuilder extends QueryTreeBuilder implements + StandardQueryBuilder { + + private boolean debug = false; + private int counter = 0; + + public AqpQueryTreeBuilder(boolean debug) { + this.setDebug(debug); + init(); + } + + public AqpQueryTreeBuilder() { + init(); + } + + public void setDebug(boolean val) { + if (val != debug) { + debug = val; + init(); + } + debug = val; + } + + public boolean isInDebugMode() { + return debug; + } + + public void init() { + throw new IllegalAccessError("AqpQueryTreeBuilder must be subclassed and has the init() method"); + } + + @Override + public Query build(QueryNode queryNode) throws QueryNodeException { + this.counter = 0; + return (Query) super.build(queryNode); + } + + + public void setBuilder(Class queryNodeClass, + QueryBuilder builder) { + if (this.debug) { + super.setBuilder(queryNodeClass, new DebuggingNodeBuilder(queryNodeClass, + builder)); + } else { + super.setBuilder(queryNodeClass, builder); + } + } + + class DebuggingNodeBuilder implements QueryBuilder { + private Class clazz = null; + private QueryBuilder realBuilder = null; + + DebuggingNodeBuilder(Class queryNodeClass, + QueryBuilder builder) { + clazz = queryNodeClass; + realBuilder = builder; + } + + public Object build(QueryNode queryNode) throws QueryNodeException { + System.out.println("--------------------------------------------"); + System.out.println("step " + counter++ + "."); + System.out.println("builder: " + realBuilder.getClass().getName()); + System.out.println("node: " + clazz.getName()); + System.out.println(queryNode.toString()); + System.out.println(" -->"); + Object result = realBuilder.build(queryNode); + if (result != null) { + System.out.println(((Query) result).toString() + " <" + + result.getClass().getName() + ">"); + } else { + System.out.println("null"); + } + System.out.println("--------------------------------------------"); + return result; + } + + }; + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpSlowFuzzyQueryNodeBuilder.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpSlowFuzzyQueryNodeBuilder.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpSlowFuzzyQueryNodeBuilder.java (revision 0) @@ -0,0 +1,26 @@ +package org.apache.lucene.queryparser.flexible.aqp.builders; + +import org.apache.lucene.index.Term; +import org.apache.lucene.queryparser.flexible.aqp.nodes.SlowFuzzyQueryNode; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.standard.builders.StandardQueryBuilder; +import org.apache.lucene.sandbox.queries.SlowFuzzyQuery; + +@SuppressWarnings("deprecation") +public class AqpSlowFuzzyQueryNodeBuilder implements StandardQueryBuilder { + + public AqpSlowFuzzyQueryNodeBuilder() { + // empty constructor + } + + public SlowFuzzyQuery build(QueryNode queryNode) throws QueryNodeException { + SlowFuzzyQueryNode fuzzyNode = (SlowFuzzyQueryNode) queryNode; + + return new SlowFuzzyQuery(new Term(fuzzyNode.getFieldAsString(), + fuzzyNode.getTextAsString()), fuzzyNode.getSimilarity(), + fuzzyNode.getPrefixLength()); + + } + +} Index: 
lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpFieldQueryNodeRegexBuilder.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpFieldQueryNodeRegexBuilder.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpFieldQueryNodeRegexBuilder.java (revision 0) @@ -0,0 +1,26 @@ +package org.apache.lucene.queryparser.flexible.aqp.builders; + +import org.apache.lucene.index.Term; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.standard.builders.StandardQueryBuilder; +import org.apache.lucene.search.Query; +//import org.apache.lucene.sandbox.queries.regex.RegexQuery; +import org.apache.lucene.search.RegexpQuery; + +public class AqpFieldQueryNodeRegexBuilder implements StandardQueryBuilder { + + public AqpFieldQueryNodeRegexBuilder() { + // empty constructor + } + + public Query build(QueryNode queryNode) throws QueryNodeException { + FieldQueryNode fieldNode = (FieldQueryNode) queryNode; + + return new RegexpQuery(new Term(fieldNode.getFieldAsString(), + fieldNode.getTextAsString())); + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpFieldQueryNodeBuilder.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpFieldQueryNodeBuilder.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpFieldQueryNodeBuilder.java (revision 0) @@ -0,0 +1,31 @@ +package org.apache.lucene.queryparser.flexible.aqp.builders; + +import org.apache.lucene.index.Term; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.standard.builders.StandardQueryBuilder; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; + +public class AqpFieldQueryNodeBuilder implements StandardQueryBuilder { + + public AqpFieldQueryNodeBuilder() { + // empty constructor + } + + public Query build(QueryNode queryNode) throws QueryNodeException { + FieldQueryNode fieldNode = (FieldQueryNode) queryNode; + + if (fieldNode.getFieldAsString().equals("*") + && fieldNode.getTextAsString().equals("*")) { + return new MatchAllDocsQuery(); + } + + return new TermQuery(new Term(fieldNode.getFieldAsString(), + fieldNode.getTextAsString())); + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpNearQueryNodeBuilder.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpNearQueryNodeBuilder.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/builders/AqpNearQueryNodeBuilder.java (revision 0) @@ -0,0 +1,213 @@ +package org.apache.lucene.queryparser.flexible.aqp.builders; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import 
org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.builders.QueryBuilder; +import org.apache.lucene.queryparser.flexible.core.builders.QueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpNearQueryNode; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.WildcardQuery; +import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.search.spans.SpanNearQuery; +import org.apache.lucene.search.spans.SpanNotQuery; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanTermQuery; + +/** + * The builder for the {@link AqpNearQueryNode}, example query: + * + *
+ *   dog NEAR/5 cat
+ *  
+ * + *

+ * After the AST tree was parsed, and synonyms were found, + * we may have the following tree: + * + *

+ *

+ *        AqpNearQueryNode(5)
+ *                |
+ *            ------------------------------    
+ *           /                              \
+ *         OR                         QueryNode(cat)
+ *          |
+ *       -----------------   
+ *      /                 \ 
+ *   QueryNode(dog)     QueryNode(canin)
+ *   
+ *  
+ * + * + *

+ * Since Lucene cannot handle these queries, the flex builder + * must rewrite them, effectively producing + * + *

+ * SpanNear(SpanOr(dog | canin), SpanTerm(cat), 5)
+ * 
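+ *
+ * In terms of the Lucene span API the rewrite above roughly corresponds to
+ * (the field name "f" is illustrative only):
+ *
+ *   SpanQuery synonyms = new SpanOrQuery(
+ *       new SpanTermQuery(new Term("f", "dog")),
+ *       new SpanTermQuery(new Term("f", "canin")));
+ *   SpanQuery cat = new SpanTermQuery(new Term("f", "cat"));
+ *   Query q = new SpanNearQuery(new SpanQuery[] { synonyms, cat }, 5, true);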
+ * + * + * This builder does not know (yet) how to handle cases of + * mixed boolean operators, eg. + * + *
+ * (dog AND (cat OR fat)) NEAR/5 batman
+ * 
+ * + * @see QueryNodeProcessorPipeline + * @see AqpNearQueryNode + * + */ +public class AqpNearQueryNodeBuilder implements QueryBuilder { + + public AqpNearQueryNodeBuilder() { + // empty constructor + } + + public Object build(QueryNode queryNode) throws QueryNodeException { + AqpNearQueryNode nearNode = (AqpNearQueryNode) queryNode; + + List children = nearNode.getChildren(); + + if (children != null) { + SpanQuery[] clauses = new SpanQuery[children.size()]; + + int i = 0; + for (QueryNode child : children) { + Object obj = child.getTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID); + if (obj != null) { + clauses[i++] = getSpanQuery(obj, nearNode); + } else { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, + "One of the clauses inside AqpNearQueryNode is null")); + } + } + + return new SpanNearQuery(clauses, nearNode.getSlop(), + nearNode.getInOrder()); + } + + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, + "Illegal state for: " + nearNode.toString())); + } + + protected SpanQuery getSpanQuery(Object obj, AqpNearQueryNode nearNode) + throws QueryNodeException { + Query q = (Query) obj; + if (q instanceof SpanQuery) { + return (SpanQuery) q; + } else if (q instanceof TermQuery) { + return new SpanTermQuery(((TermQuery) q).getTerm()); + } else if (q instanceof WildcardQuery) { + return new SpanMultiTermQueryWrapper((WildcardQuery) q); + } else if (q instanceof PrefixQuery) { + return new SpanMultiTermQueryWrapper((PrefixQuery) q); + } else if (q instanceof BooleanQuery) { + return convertBooleanToSpan((BooleanQuery) q, nearNode); + } else { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, q.toString(), + "(yet) Unsupported clause inside span query: " + + q.getClass().getName())); + } + } + + /* + * Silly convertor for now it can handle only boolean queries of the same type + * (ie not mixed cases). To do that, I have to build a graph (tree) and maybe + * of only pairs (?) + */ + protected SpanQuery convertBooleanToSpan(BooleanQuery q, + AqpNearQueryNode nearNode) throws QueryNodeException { + BooleanClause[] clauses = q.getClauses(); + SpanQuery[] spanClauses = new SpanQuery[clauses.length]; + Occur o = null; + int i = 0; + for (BooleanClause c : clauses) { + if (o != null && !o.equals(c.getOccur())) { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, q.toString(), + "(yet) Unsupported clause inside span query: " + + q.getClass().getName())); + } + o = c.getOccur(); + spanClauses[i] = getSpanQuery(c.getQuery(), nearNode); + i++; + } + + if (o.equals(Occur.MUST)) { + return new SpanNearQuery(spanClauses, nearNode.getSlop(), + nearNode.getInOrder()); + } else if (o.equals(Occur.SHOULD)) { + return new SpanOrQuery(spanClauses); + } else if (o.equals(Occur.MUST_NOT)) { + SpanQuery[] exclude = new SpanQuery[spanClauses.length - 1]; + for (int j = 1; j < spanClauses.length; j++) { + exclude[j - 1] = spanClauses[j]; + } + return new SpanNotQuery(spanClauses[0], new SpanOrQuery(exclude)); + } + + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, q.toString(), + "Congratulations! 
You have hit (yet) unsupported case: " + + q.getClass().getName())); + } + + class Leaf { + public List members = new ArrayList(); + public BooleanClause left; + public Leaf right; + + public Leaf(BooleanClause left, Leaf right) { + this.left = left; + this.right = right; + } + } + + /* + * Creates a tree of the clauses, according to operator precedence: + * + * Thus: D +C -A -B becomes: + * + * - / \ A - / \ B + / \ C D + */ + private Leaf constructTree(BooleanClause[] clauses) { + List toProcess = Arrays.asList(clauses); + Leaf leaf = new Leaf(null, null); + leaf.members = toProcess; + + // from highest priority + // findNots(leaf); + // findAnds(leaf); + // findOrs(leaf); + return leaf; + } + + private void findNots(Leaf leaf) { + + for (BooleanClause m : leaf.members) { + if (m.getOccur().equals(Occur.MUST_NOT)) { + leaf.members.remove(m); + leaf.left = m; + } + } + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpSyntaxParserAbstract.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpSyntaxParserAbstract.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpSyntaxParserAbstract.java (revision 0) @@ -0,0 +1,35 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +import org.antlr.runtime.TokenStream; +import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; + +/** + * All ANTLR parsers should subclass {@link AqpSyntaxParser} and + * provide two methods: + * + * {@link AqpSyntaxParser}{@link #getTokenStream(CharSequence)} + * {@link AqpSyntaxParser}{@link #parseTokenStream(TokenStream, CharSequence, CharSequence) + * + * Optionally, the new class can also override + * + * {@link AqpSyntaxParser}{@link #initializeGrammar(String) + * + * The default implementation is using reflection and is able + * to instantiate any grammar provided that the top parse rule + * is called mainQ and that the grammar is producing + * AST tree. + * + */ +public abstract class AqpSyntaxParserAbstract implements AqpSyntaxParser { + + /** + * Parse the query and return the {@link QueryNode}. ANTLR will + * do the parsing and we return AST. 
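+ *
+ * For example (the loadable implementation and the bundled
+ * StandardLuceneGrammar are used here only as an illustration):
+ *
+ *   AqpSyntaxParser parser = new AqpSyntaxParserLoadableImpl()
+ *       .initializeGrammar("StandardLuceneGrammar");
+ *   QueryNode ast = parser.parse("foo AND bar", "field");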
+ */ + public QueryNode parse(CharSequence query, CharSequence field) + throws QueryNodeParseException { + TokenStream tokens = getTokenStream(query); + return parseTokenStream(tokens, query, field); + } +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpSyntaxParserLoadableImpl.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpSyntaxParserLoadableImpl.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpSyntaxParserLoadableImpl.java (revision 0) @@ -0,0 +1,180 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; + +import org.antlr.runtime.ANTLRStringStream; +import org.antlr.runtime.CommonTokenStream; +import org.antlr.runtime.Lexer; +import org.antlr.runtime.Parser; +import org.antlr.runtime.RecognitionException; +import org.antlr.runtime.TokenSource; +import org.antlr.runtime.TokenStream; +import org.antlr.runtime.tree.TreeAdaptor; +import org.apache.lucene.queryparser.flexible.messages.Message; +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; + +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardLuceneParser; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpCommonTree; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpCommonTreeAdaptor; + +/** + * This implementation can load any AST grammar from the repository of grammars + * without a need to provide a Java implementation. It uses reflection, so it + * might be slower than a dedicated parsing class. + * + * Every grammar must have a top-level rule called mainQ + * + * And every grammar must return AST. + * + * If you know that you are going to instantiate specific parser, then + * you should not use this generic class. + * + * @see AqpSyntaxParserAbstract + * @see AqpStandardLuceneParser#init() + * + */ +public class AqpSyntaxParserLoadableImpl extends AqpSyntaxParserAbstract { + + @SuppressWarnings("rawtypes") + private Class clsLexer; + @SuppressWarnings("rawtypes") + private Class clsParser; + + private Object iLexer; + private Object iParser; + + private Method invokeMainQ; + private Method getTreeMethod; + private Method getNumberOfSyntaxErrorsMethod; + + private Lexer lexer; + private Parser parser; + + private String[] tokenNames; + + public AqpSyntaxParserLoadableImpl() { + // empty constructor + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public AqpSyntaxParser initializeGrammar(String grammarName) + throws QueryNodeParseException { + + try { + // get the Classes + clsLexer = Class + .forName("org.apache.lucene.queryparser.flexible.aqp.parser." + + grammarName + "Lexer"); + clsParser = Class + .forName("org.apache.lucene.queryparser.flexible.aqp.parser." 
+ + grammarName + "Parser"); + + // instantiate lexer with no parameter + Class partypes[] = new Class[0]; + // partypes[0] = CharStream.class; + Constructor ctLexer = clsLexer.getConstructor(partypes); + Object arglist[] = new Object[0]; + iLexer = ctLexer.newInstance(arglist); + + // instantiate parser using no parameters + // ANTLRStringStream fakeInput = new ANTLRStringStream("none"); + CommonTokenStream fakeTokens = new CommonTokenStream( + (TokenSource) clsLexer.cast(iLexer)); + Class partypes2[] = new Class[1]; + partypes2[0] = TokenStream.class; + Constructor ct = clsParser.getConstructor(partypes2); + iParser = ct.newInstance(fakeTokens); + + parser = (Parser) iParser; + lexer = (Lexer) iLexer; + + // get tokenNames + Method getTokenNames = clsParser.getDeclaredMethod("getTokenNames"); + tokenNames = (String[]) getTokenNames.invoke(iParser); + + // create adaptor + AqpCommonTreeAdaptor adaptor = new AqpCommonTreeAdaptor(tokenNames); + + // set adaptor + Method setTreeAdaptor = clsParser.getDeclaredMethod("setTreeAdaptor", + TreeAdaptor.class); + setTreeAdaptor.invoke(iParser, adaptor); + + // get the mainQ parser rule & return value + invokeMainQ = clsParser.getDeclaredMethod("mainQ"); + getTreeMethod = invokeMainQ.getReturnType().getMethod("getTree"); + getNumberOfSyntaxErrorsMethod = clsParser + .getMethod("getNumberOfSyntaxErrors"); + + // AqpCommonTree ast = parseTest("hey:joe"); + + return this; + + } catch (Exception e) { + e.printStackTrace(); + throw new QueryNodeParseException(e); + } + + } + + public TokenStream getTokenStream(CharSequence query) { + ANTLRStringStream input = new ANTLRStringStream(query.toString()); + lexer.setCharStream(input); + + // get tokens + CommonTokenStream tokens = new CommonTokenStream( + (TokenSource) clsLexer.cast(iLexer)); + return tokens; + } + + public QueryNode parseTokenStream(TokenStream tokens, CharSequence query, + CharSequence field) throws QueryNodeParseException { + + // set tokens + parser.setTokenStream(tokens); + + // get tree back + Object retVal; + AqpCommonTree astTree; + + try { + retVal = invokeMainQ.invoke(iParser); + astTree = (AqpCommonTree) (getTreeMethod.invoke(retVal)); + + // this prevents parser from recovering, however it can also interfere + // with custom error handling (if present inside the grammar) + Object errNo = getNumberOfSyntaxErrorsMethod.invoke(iParser); + + if (errNo instanceof Integer && ((Integer) errNo > 0)) { + throw new Error( + "The parser reported a syntax error, antlrqueryparser hates errors!"); + } + } catch (Error e) { + Message message = new MessageImpl( + QueryParserMessages.INVALID_SYNTAX_CANNOT_PARSE, query, + e.getMessage()); + QueryNodeParseException ee = new QueryNodeParseException(e); + ee.setQuery(query); + ee.setNonLocalizedMessage(message); + throw ee; + } catch (Exception e) { // TODO: these exceptions are from the code, should + // not be printed + // e.printStackTrace(); + QueryNodeParseException ee = new QueryNodeParseException(e); + throw ee; + } + + try { + return astTree.toQueryNodeTree(); + } catch (RecognitionException e) { + throw new QueryNodeParseException(new MessageImpl(query + " >> " + + parser.getErrorMessage(e, parser.getTokenNames()))); + } + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/NestedParseException.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/NestedParseException.java (revision 0) +++ 
lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/NestedParseException.java (revision 0) @@ -0,0 +1,32 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +/** + * This class is used explicitly (and only) for exceptions that happen inside a + * QueryParser (AQP). We want to have a mechanism to raise and exception that is + * not interfering with existing interfaces. But which is caught by the + * {@link AqpAdsabsQueryParser} + * + * + */ + +public class NestedParseException extends RuntimeException { + + private static final long serialVersionUID = -3943145526662562552L; + + public NestedParseException() { + super(); + } + + public NestedParseException(String message, Throwable cause) { + super(message, cause); + } + + public NestedParseException(String string) { + super(string); + } + + public NestedParseException(Throwable cause) { + super(cause); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammarParser.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammarParser.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammarParser.java (revision 0) @@ -0,0 +1,6079 @@ +// $ANTLR 3.4 /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g 2013-06-27 18:55:22 + + package org.apache.lucene.queryparser.flexible.aqp.parser; + + +import org.antlr.runtime.*; +import java.util.Stack; +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.HashMap; + +import org.antlr.runtime.tree.*; + + +@SuppressWarnings({"all", "warnings", "unchecked"}) +public class StandardLuceneGrammarParser extends UnforgivingParser { + public static final String[] tokenNames = new String[] { + "", "", "", "", "AMPER", "AND", "ATOM", "BOOST", "CARAT", "CLAUSE", "COLON", "DATE_TOKEN", "DQUOTE", "ESC_CHAR", "FIELD", "FUZZY", "INT", "LBRACK", "LCURLY", "LPAREN", "MINUS", "MODIFIER", "NOT", "NUMBER", "OPERATOR", "OR", "PHRASE", "PHRASE_ANYTHING", "PLUS", "QANYTHING", "QDATE", "QMARK", "QNORMAL", "QPHRASE", "QPHRASETRUNC", "QRANGEEX", "QRANGEIN", "QTRUNCATED", "RBRACK", "RCURLY", "RPAREN", "SQUOTE", "STAR", "TERM_CHAR", "TERM_NORMAL", "TERM_START_CHAR", "TERM_TRUNCATED", "TILDE", "TMODIFIER", "TO", "VBAR", "WS" + }; + + public static final int EOF=-1; + public static final int AMPER=4; + public static final int AND=5; + public static final int ATOM=6; + public static final int BOOST=7; + public static final int CARAT=8; + public static final int CLAUSE=9; + public static final int COLON=10; + public static final int DATE_TOKEN=11; + public static final int DQUOTE=12; + public static final int ESC_CHAR=13; + public static final int FIELD=14; + public static final int FUZZY=15; + public static final int INT=16; + public static final int LBRACK=17; + public static final int LCURLY=18; + public static final int LPAREN=19; + public static final int MINUS=20; + public static final int MODIFIER=21; + public static final int NOT=22; + public static final int NUMBER=23; + public static final int OPERATOR=24; + public static final int OR=25; + public static final int PHRASE=26; + public static final int PHRASE_ANYTHING=27; + public static final int PLUS=28; + public static final int QANYTHING=29; + public static final int QDATE=30; + public static final int QMARK=31; + public static final int QNORMAL=32; + public 
static final int QPHRASE=33; + public static final int QPHRASETRUNC=34; + public static final int QRANGEEX=35; + public static final int QRANGEIN=36; + public static final int QTRUNCATED=37; + public static final int RBRACK=38; + public static final int RCURLY=39; + public static final int RPAREN=40; + public static final int SQUOTE=41; + public static final int STAR=42; + public static final int TERM_CHAR=43; + public static final int TERM_NORMAL=44; + public static final int TERM_START_CHAR=45; + public static final int TERM_TRUNCATED=46; + public static final int TILDE=47; + public static final int TMODIFIER=48; + public static final int TO=49; + public static final int VBAR=50; + public static final int WS=51; + + // delegates + public UnforgivingParser[] getDelegates() { + return new UnforgivingParser[] {}; + } + + // delegators + + + public StandardLuceneGrammarParser(TokenStream input) { + this(input, new RecognizerSharedState()); + } + public StandardLuceneGrammarParser(TokenStream input, RecognizerSharedState state) { + super(input, state); + } + +protected TreeAdaptor adaptor = new CommonTreeAdaptor(); + +public void setTreeAdaptor(TreeAdaptor adaptor) { + this.adaptor = adaptor; +} +public TreeAdaptor getTreeAdaptor() { + return adaptor; +} + public String[] getTokenNames() { return StandardLuceneGrammarParser.tokenNames; } + public String getGrammarFileName() { return "/dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g"; } + + + public static class mainQ_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "mainQ" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:103:1: mainQ : ( clauseOr )+ EOF -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ; + public final StandardLuceneGrammarParser.mainQ_return mainQ() throws RecognitionException { + StandardLuceneGrammarParser.mainQ_return retval = new StandardLuceneGrammarParser.mainQ_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token EOF2=null; + StandardLuceneGrammarParser.clauseOr_return clauseOr1 =null; + + + Object EOF2_tree=null; + RewriteRuleTokenStream stream_EOF=new RewriteRuleTokenStream(adaptor,"token EOF"); + RewriteRuleSubtreeStream stream_clauseOr=new RewriteRuleSubtreeStream(adaptor,"rule clauseOr"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:103:7: ( ( clauseOr )+ EOF -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:104:2: ( clauseOr )+ EOF + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:104:2: ( clauseOr )+ + int cnt1=0; + loop1: + do { + int alt1=2; + int LA1_0 = input.LA(1); + + if ( ((LA1_0 >= LBRACK && LA1_0 <= MINUS)||LA1_0==NUMBER||(LA1_0 >= PHRASE && LA1_0 <= PLUS)||LA1_0==QMARK||LA1_0==STAR||LA1_0==TERM_NORMAL||LA1_0==TERM_TRUNCATED) ) { + alt1=1; + } + + + switch (alt1) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:104:2: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_mainQ212); + clauseOr1=clauseOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseOr.add(clauseOr1.getTree()); + + } + break; + + default : + if ( cnt1 >= 1 ) break loop1; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(1, input); + throw eee; + } + cnt1++; + } while (true); + + + 
EOF2=(Token)match(input,EOF,FOLLOW_EOF_in_mainQ215); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_EOF.add(EOF2); + + + // AST REWRITE + // elements: clauseOr + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 104:16: -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:104:19: ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "DEFOP") + , root_1); + + if ( !(stream_clauseOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseOr.hasNext() ) { + adaptor.addChild(root_1, stream_clauseOr.nextTree()); + + } + stream_clauseOr.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "mainQ" + + + public static class clauseOr_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "clauseOr" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:108:1: clauseOr : (first= clauseAnd -> $first) ( or others= clauseAnd -> ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) )* ; + public final StandardLuceneGrammarParser.clauseOr_return clauseOr() throws RecognitionException { + StandardLuceneGrammarParser.clauseOr_return retval = new StandardLuceneGrammarParser.clauseOr_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + StandardLuceneGrammarParser.clauseAnd_return first =null; + + StandardLuceneGrammarParser.clauseAnd_return others =null; + + StandardLuceneGrammarParser.or_return or3 =null; + + + RewriteRuleSubtreeStream stream_clauseAnd=new RewriteRuleSubtreeStream(adaptor,"rule clauseAnd"); + RewriteRuleSubtreeStream stream_or=new RewriteRuleSubtreeStream(adaptor,"rule or"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:109:3: ( (first= clauseAnd -> $first) ( or others= clauseAnd -> ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:109:5: (first= clauseAnd -> $first) ( or others= clauseAnd -> ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:109:5: (first= clauseAnd -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:109:6: first= clauseAnd + { + pushFollow(FOLLOW_clauseAnd_in_clauseOr246); + first=clauseAnd(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseAnd.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: 
+ // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 109:22: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:109:33: ( or others= clauseAnd -> ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) )* + loop2: + do { + int alt2=2; + int LA2_0 = input.LA(1); + + if ( (LA2_0==OR) ) { + alt2=1; + } + + + switch (alt2) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:109:34: or others= clauseAnd + { + pushFollow(FOLLOW_or_in_clauseOr255); + or3=or(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_or.add(or3.getTree()); + + pushFollow(FOLLOW_clauseAnd_in_clauseOr259); + others=clauseAnd(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseAnd.add(others.getTree()); + + // AST REWRITE + // elements: clauseAnd + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 109:54: -> ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:109:57: ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "OR") + , root_1); + + if ( !(stream_clauseAnd.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseAnd.hasNext() ) { + adaptor.addChild(root_1, stream_clauseAnd.nextTree()); + + } + stream_clauseAnd.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop2; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "clauseOr" + + + public static class clauseAnd_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "clauseAnd" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:112:1: clauseAnd : (first= clauseNot -> $first) ( and others= clauseNot -> ^( OPERATOR[\"AND\"] ( clauseNot )+ ) )* ; + public final StandardLuceneGrammarParser.clauseAnd_return clauseAnd() throws RecognitionException { + StandardLuceneGrammarParser.clauseAnd_return retval = new StandardLuceneGrammarParser.clauseAnd_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + StandardLuceneGrammarParser.clauseNot_return first =null; + + StandardLuceneGrammarParser.clauseNot_return others 
=null; + + StandardLuceneGrammarParser.and_return and4 =null; + + + RewriteRuleSubtreeStream stream_clauseNot=new RewriteRuleSubtreeStream(adaptor,"rule clauseNot"); + RewriteRuleSubtreeStream stream_and=new RewriteRuleSubtreeStream(adaptor,"rule and"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:113:3: ( (first= clauseNot -> $first) ( and others= clauseNot -> ^( OPERATOR[\"AND\"] ( clauseNot )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:113:5: (first= clauseNot -> $first) ( and others= clauseNot -> ^( OPERATOR[\"AND\"] ( clauseNot )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:113:5: (first= clauseNot -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:113:6: first= clauseNot + { + pushFollow(FOLLOW_clauseNot_in_clauseAnd288); + first=clauseNot(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseNot.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 113:23: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:113:34: ( and others= clauseNot -> ^( OPERATOR[\"AND\"] ( clauseNot )+ ) )* + loop3: + do { + int alt3=2; + int LA3_0 = input.LA(1); + + if ( (LA3_0==AND) ) { + alt3=1; + } + + + switch (alt3) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:113:35: and others= clauseNot + { + pushFollow(FOLLOW_and_in_clauseAnd298); + and4=and(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_and.add(and4.getTree()); + + pushFollow(FOLLOW_clauseNot_in_clauseAnd302); + others=clauseNot(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseNot.add(others.getTree()); + + // AST REWRITE + // elements: clauseNot + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 113:56: -> ^( OPERATOR[\"AND\"] ( clauseNot )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:113:59: ^( OPERATOR[\"AND\"] ( clauseNot )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "AND") + , root_1); + + if ( !(stream_clauseNot.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseNot.hasNext() ) { + adaptor.addChild(root_1, stream_clauseNot.nextTree()); + + } + stream_clauseNot.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop3; + } + } while (true); + + + } + + retval.stop = 
input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "clauseAnd" + + + public static class clauseNot_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "clauseNot" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:116:1: clauseNot : (first= clauseBasic -> $first) ( not others= clauseBasic -> ^( OPERATOR[\"NOT\"] ( clauseBasic )+ ) )* ; + public final StandardLuceneGrammarParser.clauseNot_return clauseNot() throws RecognitionException { + StandardLuceneGrammarParser.clauseNot_return retval = new StandardLuceneGrammarParser.clauseNot_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + StandardLuceneGrammarParser.clauseBasic_return first =null; + + StandardLuceneGrammarParser.clauseBasic_return others =null; + + StandardLuceneGrammarParser.not_return not5 =null; + + + RewriteRuleSubtreeStream stream_not=new RewriteRuleSubtreeStream(adaptor,"rule not"); + RewriteRuleSubtreeStream stream_clauseBasic=new RewriteRuleSubtreeStream(adaptor,"rule clauseBasic"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:117:3: ( (first= clauseBasic -> $first) ( not others= clauseBasic -> ^( OPERATOR[\"NOT\"] ( clauseBasic )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:117:5: (first= clauseBasic -> $first) ( not others= clauseBasic -> ^( OPERATOR[\"NOT\"] ( clauseBasic )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:117:5: (first= clauseBasic -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:117:6: first= clauseBasic + { + pushFollow(FOLLOW_clauseBasic_in_clauseNot333); + first=clauseBasic(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseBasic.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 117:24: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:117:35: ( not others= clauseBasic -> ^( OPERATOR[\"NOT\"] ( clauseBasic )+ ) )* + loop4: + do { + int alt4=2; + int LA4_0 = input.LA(1); + + if ( (LA4_0==AND) ) { + int LA4_1 = input.LA(2); + + if ( (LA4_1==NOT) ) { + alt4=1; + } + + + } + else if ( (LA4_0==NOT) ) { + alt4=1; + } + + + switch (alt4) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:117:36: not others= clauseBasic + { + pushFollow(FOLLOW_not_in_clauseNot342); + not5=not(); + + state._fsp--; + if (state.failed) return 
retval; + if ( state.backtracking==0 ) stream_not.add(not5.getTree()); + + pushFollow(FOLLOW_clauseBasic_in_clauseNot346); + others=clauseBasic(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseBasic.add(others.getTree()); + + // AST REWRITE + // elements: clauseBasic + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 117:59: -> ^( OPERATOR[\"NOT\"] ( clauseBasic )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:117:62: ^( OPERATOR[\"NOT\"] ( clauseBasic )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "NOT") + , root_1); + + if ( !(stream_clauseBasic.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseBasic.hasNext() ) { + adaptor.addChild(root_1, stream_clauseBasic.nextTree()); + + } + stream_clauseBasic.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop4; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "clauseNot" + + + public static class clauseBasic_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "clauseBasic" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:121:1: clauseBasic : ( ( modifier LPAREN ( clauseOr )+ RPAREN )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) | ( LPAREN ( clauseOr )+ RPAREN term_modifier )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? 
^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) | ( LPAREN )=> LPAREN ( clauseOr )+ RPAREN -> ( clauseOr )+ | atom ); + public final StandardLuceneGrammarParser.clauseBasic_return clauseBasic() throws RecognitionException { + StandardLuceneGrammarParser.clauseBasic_return retval = new StandardLuceneGrammarParser.clauseBasic_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token LPAREN7=null; + Token RPAREN9=null; + Token LPAREN12=null; + Token RPAREN14=null; + Token LPAREN16=null; + Token RPAREN18=null; + StandardLuceneGrammarParser.modifier_return modifier6 =null; + + StandardLuceneGrammarParser.clauseOr_return clauseOr8 =null; + + StandardLuceneGrammarParser.term_modifier_return term_modifier10 =null; + + StandardLuceneGrammarParser.modifier_return modifier11 =null; + + StandardLuceneGrammarParser.clauseOr_return clauseOr13 =null; + + StandardLuceneGrammarParser.term_modifier_return term_modifier15 =null; + + StandardLuceneGrammarParser.clauseOr_return clauseOr17 =null; + + StandardLuceneGrammarParser.atom_return atom19 =null; + + + Object LPAREN7_tree=null; + Object RPAREN9_tree=null; + Object LPAREN12_tree=null; + Object RPAREN14_tree=null; + Object LPAREN16_tree=null; + Object RPAREN18_tree=null; + RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN"); + RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN"); + RewriteRuleSubtreeStream stream_modifier=new RewriteRuleSubtreeStream(adaptor,"rule modifier"); + RewriteRuleSubtreeStream stream_term_modifier=new RewriteRuleSubtreeStream(adaptor,"rule term_modifier"); + RewriteRuleSubtreeStream stream_clauseOr=new RewriteRuleSubtreeStream(adaptor,"rule clauseOr"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:122:2: ( ( modifier LPAREN ( clauseOr )+ RPAREN )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) | ( LPAREN ( clauseOr )+ RPAREN term_modifier )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? 
^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) | ( LPAREN )=> LPAREN ( clauseOr )+ RPAREN -> ( clauseOr )+ | atom ) + int alt12=4; + switch ( input.LA(1) ) { + case PLUS: + { + int LA12_1 = input.LA(2); + + if ( (synpred1_StandardLuceneGrammar()) ) { + alt12=1; + } + else if ( (synpred2_StandardLuceneGrammar()) ) { + alt12=2; + } + else if ( (true) ) { + alt12=4; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 12, 1, input); + + throw nvae; + + } + } + break; + case MINUS: + { + int LA12_2 = input.LA(2); + + if ( (synpred1_StandardLuceneGrammar()) ) { + alt12=1; + } + else if ( (synpred2_StandardLuceneGrammar()) ) { + alt12=2; + } + else if ( (true) ) { + alt12=4; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 12, 2, input); + + throw nvae; + + } + } + break; + case LPAREN: + { + int LA12_3 = input.LA(2); + + if ( (synpred1_StandardLuceneGrammar()) ) { + alt12=1; + } + else if ( (synpred2_StandardLuceneGrammar()) ) { + alt12=2; + } + else if ( (synpred3_StandardLuceneGrammar()) ) { + alt12=3; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 12, 3, input); + + throw nvae; + + } + } + break; + case LBRACK: + case LCURLY: + case NUMBER: + case PHRASE: + case PHRASE_ANYTHING: + case QMARK: + case STAR: + case TERM_NORMAL: + case TERM_TRUNCATED: + { + alt12=4; + } + break; + default: + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 12, 0, input); + + throw nvae; + + } + + switch (alt12) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:2: ( modifier LPAREN ( clauseOr )+ RPAREN )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:40: ( modifier )? 
+ int alt5=2; + int LA5_0 = input.LA(1); + + if ( (LA5_0==MINUS||LA5_0==PLUS) ) { + alt5=1; + } + switch (alt5) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:40: modifier + { + pushFollow(FOLLOW_modifier_in_clauseBasic391); + modifier6=modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_modifier.add(modifier6.getTree()); + + } + break; + + } + + + LPAREN7=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_clauseBasic394); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN7); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:57: ( clauseOr )+ + int cnt6=0; + loop6: + do { + int alt6=2; + int LA6_0 = input.LA(1); + + if ( ((LA6_0 >= LBRACK && LA6_0 <= MINUS)||LA6_0==NUMBER||(LA6_0 >= PHRASE && LA6_0 <= PLUS)||LA6_0==QMARK||LA6_0==STAR||LA6_0==TERM_NORMAL||LA6_0==TERM_TRUNCATED) ) { + alt6=1; + } + + + switch (alt6) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:57: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_clauseBasic396); + clauseOr8=clauseOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseOr.add(clauseOr8.getTree()); + + } + break; + + default : + if ( cnt6 >= 1 ) break loop6; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(6, input); + throw eee; + } + cnt6++; + } while (true); + + + RPAREN9=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_clauseBasic399); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN9); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:74: ( term_modifier )? + int alt7=2; + int LA7_0 = input.LA(1); + + if ( (LA7_0==CARAT||LA7_0==TILDE) ) { + alt7=1; + } + switch (alt7) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:74: term_modifier + { + pushFollow(FOLLOW_term_modifier_in_clauseBasic401); + term_modifier10=term_modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_term_modifier.add(term_modifier10.getTree()); + + } + break; + + } + + + // AST REWRITE + // elements: term_modifier, clauseOr, modifier + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 124:3: -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:124:6: ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(CLAUSE, "CLAUSE") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:124:15: ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? 
^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) + { + Object root_2 = (Object)adaptor.nil(); + root_2 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(MODIFIER, "MODIFIER") + , root_2); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:124:26: ( modifier )? + if ( stream_modifier.hasNext() ) { + adaptor.addChild(root_2, stream_modifier.nextTree()); + + } + stream_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:124:36: ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) + { + Object root_3 = (Object)adaptor.nil(); + root_3 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(TMODIFIER, "TMODIFIER") + , root_3); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:124:48: ( term_modifier )? + if ( stream_term_modifier.hasNext() ) { + adaptor.addChild(root_3, stream_term_modifier.nextTree()); + + } + stream_term_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:124:63: ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + Object root_4 = (Object)adaptor.nil(); + root_4 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "DEFOP") + , root_4); + + if ( !(stream_clauseOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseOr.hasNext() ) { + adaptor.addChild(root_4, stream_clauseOr.nextTree()); + + } + stream_clauseOr.reset(); + + adaptor.addChild(root_3, root_4); + } + + adaptor.addChild(root_2, root_3); + } + + adaptor.addChild(root_1, root_2); + } + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:4: ( LPAREN ( clauseOr )+ RPAREN term_modifier )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:46: ( modifier )? 
+ int alt8=2; + int LA8_0 = input.LA(1); + + if ( (LA8_0==MINUS||LA8_0==PLUS) ) { + alt8=1; + } + switch (alt8) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:46: modifier + { + pushFollow(FOLLOW_modifier_in_clauseBasic451); + modifier11=modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_modifier.add(modifier11.getTree()); + + } + break; + + } + + + LPAREN12=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_clauseBasic454); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN12); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:63: ( clauseOr )+ + int cnt9=0; + loop9: + do { + int alt9=2; + int LA9_0 = input.LA(1); + + if ( ((LA9_0 >= LBRACK && LA9_0 <= MINUS)||LA9_0==NUMBER||(LA9_0 >= PHRASE && LA9_0 <= PLUS)||LA9_0==QMARK||LA9_0==STAR||LA9_0==TERM_NORMAL||LA9_0==TERM_TRUNCATED) ) { + alt9=1; + } + + + switch (alt9) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:63: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_clauseBasic456); + clauseOr13=clauseOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseOr.add(clauseOr13.getTree()); + + } + break; + + default : + if ( cnt9 >= 1 ) break loop9; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(9, input); + throw eee; + } + cnt9++; + } while (true); + + + RPAREN14=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_clauseBasic459); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN14); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:80: ( term_modifier )? + int alt10=2; + int LA10_0 = input.LA(1); + + if ( (LA10_0==CARAT||LA10_0==TILDE) ) { + alt10=1; + } + switch (alt10) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:80: term_modifier + { + pushFollow(FOLLOW_term_modifier_in_clauseBasic461); + term_modifier15=term_modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_term_modifier.add(term_modifier15.getTree()); + + } + break; + + } + + + // AST REWRITE + // elements: term_modifier, clauseOr, modifier + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 126:3: -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:126:6: ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(CLAUSE, "CLAUSE") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:126:15: ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? 
^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) + { + Object root_2 = (Object)adaptor.nil(); + root_2 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(MODIFIER, "MODIFIER") + , root_2); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:126:26: ( modifier )? + if ( stream_modifier.hasNext() ) { + adaptor.addChild(root_2, stream_modifier.nextTree()); + + } + stream_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:126:36: ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) + { + Object root_3 = (Object)adaptor.nil(); + root_3 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(TMODIFIER, "TMODIFIER") + , root_3); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:126:48: ( term_modifier )? + if ( stream_term_modifier.hasNext() ) { + adaptor.addChild(root_3, stream_term_modifier.nextTree()); + + } + stream_term_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:126:63: ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + Object root_4 = (Object)adaptor.nil(); + root_4 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "DEFOP") + , root_4); + + if ( !(stream_clauseOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseOr.hasNext() ) { + adaptor.addChild(root_4, stream_clauseOr.nextTree()); + + } + stream_clauseOr.reset(); + + adaptor.addChild(root_3, root_4); + } + + adaptor.addChild(root_2, root_3); + } + + adaptor.addChild(root_1, root_2); + } + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:127:4: ( LPAREN )=> LPAREN ( clauseOr )+ RPAREN + { + LPAREN16=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_clauseBasic506); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN16); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:127:24: ( clauseOr )+ + int cnt11=0; + loop11: + do { + int alt11=2; + int LA11_0 = input.LA(1); + + if ( ((LA11_0 >= LBRACK && LA11_0 <= MINUS)||LA11_0==NUMBER||(LA11_0 >= PHRASE && LA11_0 <= PLUS)||LA11_0==QMARK||LA11_0==STAR||LA11_0==TERM_NORMAL||LA11_0==TERM_TRUNCATED) ) { + alt11=1; + } + + + switch (alt11) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:127:24: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_clauseBasic508); + clauseOr17=clauseOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseOr.add(clauseOr17.getTree()); + + } + break; + + default : + if ( cnt11 >= 1 ) break loop11; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(11, input); + throw eee; + } + cnt11++; + } while (true); + + + RPAREN18=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_clauseBasic511); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN18); + + + // AST REWRITE + // elements: clauseOr + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 128:3: -> ( clauseOr )+ + { + if ( 
!(stream_clauseOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseOr.hasNext() ) { + adaptor.addChild(root_0, stream_clauseOr.nextTree()); + + } + stream_clauseOr.reset(); + + } + + + retval.tree = root_0; + } + + } + break; + case 4 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:129:4: atom + { + root_0 = (Object)adaptor.nil(); + + + pushFollow(FOLLOW_atom_in_clauseBasic523); + atom19=atom(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) adaptor.addChild(root_0, atom19.getTree()); + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "clauseBasic" + + + public static class atom_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "atom" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:133:1: atom : ( ( modifier )? field multi_value ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD field multi_value ) ) ) ) | ( modifier )? ( field )? value ( term_modifier )? -> ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD ( field )? value ) ) ) ); + public final StandardLuceneGrammarParser.atom_return atom() throws RecognitionException { + StandardLuceneGrammarParser.atom_return retval = new StandardLuceneGrammarParser.atom_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + StandardLuceneGrammarParser.modifier_return modifier20 =null; + + StandardLuceneGrammarParser.field_return field21 =null; + + StandardLuceneGrammarParser.multi_value_return multi_value22 =null; + + StandardLuceneGrammarParser.term_modifier_return term_modifier23 =null; + + StandardLuceneGrammarParser.modifier_return modifier24 =null; + + StandardLuceneGrammarParser.field_return field25 =null; + + StandardLuceneGrammarParser.value_return value26 =null; + + StandardLuceneGrammarParser.term_modifier_return term_modifier27 =null; + + + RewriteRuleSubtreeStream stream_modifier=new RewriteRuleSubtreeStream(adaptor,"rule modifier"); + RewriteRuleSubtreeStream stream_field=new RewriteRuleSubtreeStream(adaptor,"rule field"); + RewriteRuleSubtreeStream stream_term_modifier=new RewriteRuleSubtreeStream(adaptor,"rule term_modifier"); + RewriteRuleSubtreeStream stream_value=new RewriteRuleSubtreeStream(adaptor,"rule value"); + RewriteRuleSubtreeStream stream_multi_value=new RewriteRuleSubtreeStream(adaptor,"rule multi_value"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:134:2: ( ( modifier )? field multi_value ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD field multi_value ) ) ) ) | ( modifier )? ( field )? value ( term_modifier )? -> ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD ( field )? 
value ) ) ) ) + int alt18=2; + switch ( input.LA(1) ) { + case PLUS: + { + int LA18_1 = input.LA(2); + + if ( (LA18_1==TERM_NORMAL) ) { + int LA18_3 = input.LA(3); + + if ( (LA18_3==COLON) ) { + int LA18_5 = input.LA(4); + + if ( (LA18_5==LPAREN) ) { + alt18=1; + } + else if ( ((LA18_5 >= LBRACK && LA18_5 <= LCURLY)||LA18_5==NUMBER||(LA18_5 >= PHRASE && LA18_5 <= PHRASE_ANYTHING)||LA18_5==QMARK||LA18_5==STAR||LA18_5==TERM_NORMAL||LA18_5==TERM_TRUNCATED) ) { + alt18=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 18, 5, input); + + throw nvae; + + } + } + else if ( (LA18_3==EOF||LA18_3==AND||LA18_3==CARAT||(LA18_3 >= LBRACK && LA18_3 <= MINUS)||(LA18_3 >= NOT && LA18_3 <= NUMBER)||(LA18_3 >= OR && LA18_3 <= PLUS)||LA18_3==QMARK||LA18_3==RPAREN||LA18_3==STAR||LA18_3==TERM_NORMAL||(LA18_3 >= TERM_TRUNCATED && LA18_3 <= TILDE)) ) { + alt18=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 18, 3, input); + + throw nvae; + + } + } + else if ( ((LA18_1 >= LBRACK && LA18_1 <= LCURLY)||LA18_1==NUMBER||(LA18_1 >= PHRASE && LA18_1 <= PHRASE_ANYTHING)||LA18_1==QMARK||LA18_1==STAR||LA18_1==TERM_TRUNCATED) ) { + alt18=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 18, 1, input); + + throw nvae; + + } + } + break; + case MINUS: + { + int LA18_2 = input.LA(2); + + if ( (LA18_2==TERM_NORMAL) ) { + int LA18_3 = input.LA(3); + + if ( (LA18_3==COLON) ) { + int LA18_5 = input.LA(4); + + if ( (LA18_5==LPAREN) ) { + alt18=1; + } + else if ( ((LA18_5 >= LBRACK && LA18_5 <= LCURLY)||LA18_5==NUMBER||(LA18_5 >= PHRASE && LA18_5 <= PHRASE_ANYTHING)||LA18_5==QMARK||LA18_5==STAR||LA18_5==TERM_NORMAL||LA18_5==TERM_TRUNCATED) ) { + alt18=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 18, 5, input); + + throw nvae; + + } + } + else if ( (LA18_3==EOF||LA18_3==AND||LA18_3==CARAT||(LA18_3 >= LBRACK && LA18_3 <= MINUS)||(LA18_3 >= NOT && LA18_3 <= NUMBER)||(LA18_3 >= OR && LA18_3 <= PLUS)||LA18_3==QMARK||LA18_3==RPAREN||LA18_3==STAR||LA18_3==TERM_NORMAL||(LA18_3 >= TERM_TRUNCATED && LA18_3 <= TILDE)) ) { + alt18=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 18, 3, input); + + throw nvae; + + } + } + else if ( ((LA18_2 >= LBRACK && LA18_2 <= LCURLY)||LA18_2==NUMBER||(LA18_2 >= PHRASE && LA18_2 <= PHRASE_ANYTHING)||LA18_2==QMARK||LA18_2==STAR||LA18_2==TERM_TRUNCATED) ) { + alt18=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 18, 2, input); + + throw nvae; + + } + } + break; + case TERM_NORMAL: + { + int LA18_3 = input.LA(2); + + if ( (LA18_3==COLON) ) { + int LA18_5 = input.LA(3); + + if ( (LA18_5==LPAREN) ) { + alt18=1; + } + else if ( ((LA18_5 >= LBRACK && LA18_5 <= LCURLY)||LA18_5==NUMBER||(LA18_5 >= PHRASE && LA18_5 <= PHRASE_ANYTHING)||LA18_5==QMARK||LA18_5==STAR||LA18_5==TERM_NORMAL||LA18_5==TERM_TRUNCATED) ) { + alt18=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 18, 5, input); + + throw nvae; + + } + } + else if ( (LA18_3==EOF||LA18_3==AND||LA18_3==CARAT||(LA18_3 >= LBRACK && 
LA18_3 <= MINUS)||(LA18_3 >= NOT && LA18_3 <= NUMBER)||(LA18_3 >= OR && LA18_3 <= PLUS)||LA18_3==QMARK||LA18_3==RPAREN||LA18_3==STAR||LA18_3==TERM_NORMAL||(LA18_3 >= TERM_TRUNCATED && LA18_3 <= TILDE)) ) { + alt18=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 18, 3, input); + + throw nvae; + + } + } + break; + case LBRACK: + case LCURLY: + case NUMBER: + case PHRASE: + case PHRASE_ANYTHING: + case QMARK: + case STAR: + case TERM_TRUNCATED: + { + alt18=2; + } + break; + default: + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 18, 0, input); + + throw nvae; + + } + + switch (alt18) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:135:2: ( modifier )? field multi_value ( term_modifier )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:135:2: ( modifier )? + int alt13=2; + int LA13_0 = input.LA(1); + + if ( (LA13_0==MINUS||LA13_0==PLUS) ) { + alt13=1; + } + switch (alt13) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:135:2: modifier + { + pushFollow(FOLLOW_modifier_in_atom544); + modifier20=modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_modifier.add(modifier20.getTree()); + + } + break; + + } + + + pushFollow(FOLLOW_field_in_atom547); + field21=field(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_field.add(field21.getTree()); + + pushFollow(FOLLOW_multi_value_in_atom549); + multi_value22=multi_value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multi_value.add(multi_value22.getTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:135:30: ( term_modifier )? + int alt14=2; + int LA14_0 = input.LA(1); + + if ( (LA14_0==CARAT||LA14_0==TILDE) ) { + alt14=1; + } + switch (alt14) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:135:30: term_modifier + { + pushFollow(FOLLOW_term_modifier_in_atom551); + term_modifier23=term_modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_term_modifier.add(term_modifier23.getTree()); + + } + break; + + } + + + // AST REWRITE + // elements: multi_value, modifier, field, term_modifier + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 136:3: -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD field multi_value ) ) ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:136:6: ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD field multi_value ) ) ) ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(CLAUSE, "CLAUSE") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:136:15: ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? 
^( FIELD field multi_value ) ) ) + { + Object root_2 = (Object)adaptor.nil(); + root_2 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(MODIFIER, "MODIFIER") + , root_2); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:136:26: ( modifier )? + if ( stream_modifier.hasNext() ) { + adaptor.addChild(root_2, stream_modifier.nextTree()); + + } + stream_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:136:36: ^( TMODIFIER ( term_modifier )? ^( FIELD field multi_value ) ) + { + Object root_3 = (Object)adaptor.nil(); + root_3 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(TMODIFIER, "TMODIFIER") + , root_3); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:136:48: ( term_modifier )? + if ( stream_term_modifier.hasNext() ) { + adaptor.addChild(root_3, stream_term_modifier.nextTree()); + + } + stream_term_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:136:63: ^( FIELD field multi_value ) + { + Object root_4 = (Object)adaptor.nil(); + root_4 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FIELD, "FIELD") + , root_4); + + adaptor.addChild(root_4, stream_field.nextTree()); + + adaptor.addChild(root_4, stream_multi_value.nextTree()); + + adaptor.addChild(root_3, root_4); + } + + adaptor.addChild(root_2, root_3); + } + + adaptor.addChild(root_1, root_2); + } + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:137:4: ( modifier )? ( field )? value ( term_modifier )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:137:4: ( modifier )? + int alt15=2; + int LA15_0 = input.LA(1); + + if ( (LA15_0==MINUS||LA15_0==PLUS) ) { + alt15=1; + } + switch (alt15) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:137:4: modifier + { + pushFollow(FOLLOW_modifier_in_atom587); + modifier24=modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_modifier.add(modifier24.getTree()); + + } + break; + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:137:14: ( field )? + int alt16=2; + int LA16_0 = input.LA(1); + + if ( (LA16_0==TERM_NORMAL) ) { + int LA16_1 = input.LA(2); + + if ( (LA16_1==COLON) ) { + alt16=1; + } + } + switch (alt16) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:137:14: field + { + pushFollow(FOLLOW_field_in_atom590); + field25=field(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_field.add(field25.getTree()); + + } + break; + + } + + + pushFollow(FOLLOW_value_in_atom593); + value26=value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_value.add(value26.getTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:137:27: ( term_modifier )? 
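+          // atom, alternative 2 (modifier? field? value term_modifier?): the check below
+          // accepts an optional trailing term_modifier, i.e. a CARAT or TILDE token
+          // (the boost and fuzzy/proximity suffixes in standard Lucene query syntax).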
+ int alt17=2; + int LA17_0 = input.LA(1); + + if ( (LA17_0==CARAT||LA17_0==TILDE) ) { + alt17=1; + } + switch (alt17) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:137:27: term_modifier + { + pushFollow(FOLLOW_term_modifier_in_atom595); + term_modifier27=term_modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_term_modifier.add(term_modifier27.getTree()); + + } + break; + + } + + + // AST REWRITE + // elements: value, modifier, field, term_modifier + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 138:3: -> ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD ( field )? value ) ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:138:6: ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD ( field )? value ) ) ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(MODIFIER, "MODIFIER") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:138:17: ( modifier )? + if ( stream_modifier.hasNext() ) { + adaptor.addChild(root_1, stream_modifier.nextTree()); + + } + stream_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:138:27: ^( TMODIFIER ( term_modifier )? ^( FIELD ( field )? value ) ) + { + Object root_2 = (Object)adaptor.nil(); + root_2 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(TMODIFIER, "TMODIFIER") + , root_2); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:138:39: ( term_modifier )? + if ( stream_term_modifier.hasNext() ) { + adaptor.addChild(root_2, stream_term_modifier.nextTree()); + + } + stream_term_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:138:54: ^( FIELD ( field )? value ) + { + Object root_3 = (Object)adaptor.nil(); + root_3 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FIELD, "FIELD") + , root_3); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:138:62: ( field )? 
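+              // In the rewrite for atom alternative 2 the FIELD node is built even when no
+              // explicit field was parsed: the optional field child is added only if one is
+              // buffered in stream_field, and the value child is always appended after it.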
+ if ( stream_field.hasNext() ) { + adaptor.addChild(root_3, stream_field.nextTree()); + + } + stream_field.reset(); + + adaptor.addChild(root_3, stream_value.nextTree()); + + adaptor.addChild(root_2, root_3); + } + + adaptor.addChild(root_1, root_2); + } + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "atom" + + + public static class field_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "field" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:142:1: field : TERM_NORMAL COLON -> TERM_NORMAL ; + public final StandardLuceneGrammarParser.field_return field() throws RecognitionException { + StandardLuceneGrammarParser.field_return retval = new StandardLuceneGrammarParser.field_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token TERM_NORMAL28=null; + Token COLON29=null; + + Object TERM_NORMAL28_tree=null; + Object COLON29_tree=null; + RewriteRuleTokenStream stream_COLON=new RewriteRuleTokenStream(adaptor,"token COLON"); + RewriteRuleTokenStream stream_TERM_NORMAL=new RewriteRuleTokenStream(adaptor,"token TERM_NORMAL"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:143:2: ( TERM_NORMAL COLON -> TERM_NORMAL ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:144:2: TERM_NORMAL COLON + { + TERM_NORMAL28=(Token)match(input,TERM_NORMAL,FOLLOW_TERM_NORMAL_in_field642); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TERM_NORMAL.add(TERM_NORMAL28); + + + COLON29=(Token)match(input,COLON,FOLLOW_COLON_in_field644); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_COLON.add(COLON29); + + + // AST REWRITE + // elements: TERM_NORMAL + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 144:20: -> TERM_NORMAL + { + adaptor.addChild(root_0, + stream_TERM_NORMAL.nextNode() + ); + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "field" + + + public static class value_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "value" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:147:1: value : ( range_term_in -> ^( QRANGEIN 
range_term_in ) | range_term_ex -> ^( QRANGEEX range_term_ex ) | normal -> ^( QNORMAL normal ) | truncated -> ^( QTRUNCATED truncated ) | quoted -> ^( QPHRASE quoted ) | quoted_truncated -> ^( QPHRASETRUNC quoted_truncated ) | QMARK -> ^( QTRUNCATED QMARK ) | STAR COLON b= STAR -> ^( QANYTHING $b) | STAR -> ^( QTRUNCATED STAR ) ); + public final StandardLuceneGrammarParser.value_return value() throws RecognitionException { + StandardLuceneGrammarParser.value_return retval = new StandardLuceneGrammarParser.value_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token b=null; + Token QMARK36=null; + Token STAR37=null; + Token COLON38=null; + Token STAR39=null; + StandardLuceneGrammarParser.range_term_in_return range_term_in30 =null; + + StandardLuceneGrammarParser.range_term_ex_return range_term_ex31 =null; + + StandardLuceneGrammarParser.normal_return normal32 =null; + + StandardLuceneGrammarParser.truncated_return truncated33 =null; + + StandardLuceneGrammarParser.quoted_return quoted34 =null; + + StandardLuceneGrammarParser.quoted_truncated_return quoted_truncated35 =null; + + + Object b_tree=null; + Object QMARK36_tree=null; + Object STAR37_tree=null; + Object COLON38_tree=null; + Object STAR39_tree=null; + RewriteRuleTokenStream stream_COLON=new RewriteRuleTokenStream(adaptor,"token COLON"); + RewriteRuleTokenStream stream_STAR=new RewriteRuleTokenStream(adaptor,"token STAR"); + RewriteRuleTokenStream stream_QMARK=new RewriteRuleTokenStream(adaptor,"token QMARK"); + RewriteRuleSubtreeStream stream_range_term_ex=new RewriteRuleSubtreeStream(adaptor,"rule range_term_ex"); + RewriteRuleSubtreeStream stream_normal=new RewriteRuleSubtreeStream(adaptor,"rule normal"); + RewriteRuleSubtreeStream stream_quoted=new RewriteRuleSubtreeStream(adaptor,"rule quoted"); + RewriteRuleSubtreeStream stream_quoted_truncated=new RewriteRuleSubtreeStream(adaptor,"rule quoted_truncated"); + RewriteRuleSubtreeStream stream_truncated=new RewriteRuleSubtreeStream(adaptor,"rule truncated"); + RewriteRuleSubtreeStream stream_range_term_in=new RewriteRuleSubtreeStream(adaptor,"rule range_term_in"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:148:2: ( range_term_in -> ^( QRANGEIN range_term_in ) | range_term_ex -> ^( QRANGEEX range_term_ex ) | normal -> ^( QNORMAL normal ) | truncated -> ^( QTRUNCATED truncated ) | quoted -> ^( QPHRASE quoted ) | quoted_truncated -> ^( QPHRASETRUNC quoted_truncated ) | QMARK -> ^( QTRUNCATED QMARK ) | STAR COLON b= STAR -> ^( QANYTHING $b) | STAR -> ^( QTRUNCATED STAR ) ) + int alt19=9; + switch ( input.LA(1) ) { + case LBRACK: + { + alt19=1; + } + break; + case LCURLY: + { + alt19=2; + } + break; + case NUMBER: + case TERM_NORMAL: + { + alt19=3; + } + break; + case TERM_TRUNCATED: + { + alt19=4; + } + break; + case PHRASE: + { + alt19=5; + } + break; + case PHRASE_ANYTHING: + { + alt19=6; + } + break; + case QMARK: + { + alt19=7; + } + break; + case STAR: + { + int LA19_8 = input.LA(2); + + if ( (LA19_8==COLON) ) { + alt19=8; + } + else if ( (LA19_8==EOF||LA19_8==AND||LA19_8==CARAT||(LA19_8 >= LBRACK && LA19_8 <= MINUS)||(LA19_8 >= NOT && LA19_8 <= NUMBER)||(LA19_8 >= OR && LA19_8 <= PLUS)||LA19_8==QMARK||LA19_8==RPAREN||LA19_8==STAR||LA19_8==TERM_NORMAL||(LA19_8 >= TERM_TRUNCATED && LA19_8 <= TILDE)) ) { + alt19=9; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 8, input); + + throw nvae; + + } + } + break; + default: + 
if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 0, input); + + throw nvae; + + } + + switch (alt19) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:149:2: range_term_in + { + pushFollow(FOLLOW_range_term_in_in_value663); + range_term_in30=range_term_in(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_term_in.add(range_term_in30.getTree()); + + // AST REWRITE + // elements: range_term_in + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 149:16: -> ^( QRANGEIN range_term_in ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:149:19: ^( QRANGEIN range_term_in ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QRANGEIN, "QRANGEIN") + , root_1); + + adaptor.addChild(root_1, stream_range_term_in.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:150:4: range_term_ex + { + pushFollow(FOLLOW_range_term_ex_in_value676); + range_term_ex31=range_term_ex(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_term_ex.add(range_term_ex31.getTree()); + + // AST REWRITE + // elements: range_term_ex + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 150:18: -> ^( QRANGEEX range_term_ex ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:150:21: ^( QRANGEEX range_term_ex ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QRANGEEX, "QRANGEEX") + , root_1); + + adaptor.addChild(root_1, stream_range_term_ex.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:151:4: normal + { + pushFollow(FOLLOW_normal_in_value690); + normal32=normal(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_normal.add(normal32.getTree()); + + // AST REWRITE + // elements: normal + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 151:11: -> ^( QNORMAL normal ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:151:14: ^( QNORMAL normal ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QNORMAL, "QNORMAL") + , root_1); + + adaptor.addChild(root_1, 
stream_normal.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 4 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:152:4: truncated + { + pushFollow(FOLLOW_truncated_in_value704); + truncated33=truncated(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_truncated.add(truncated33.getTree()); + + // AST REWRITE + // elements: truncated + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 152:14: -> ^( QTRUNCATED truncated ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:152:17: ^( QTRUNCATED truncated ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QTRUNCATED, "QTRUNCATED") + , root_1); + + adaptor.addChild(root_1, stream_truncated.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 5 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:153:4: quoted + { + pushFollow(FOLLOW_quoted_in_value718); + quoted34=quoted(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_quoted.add(quoted34.getTree()); + + // AST REWRITE + // elements: quoted + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 153:11: -> ^( QPHRASE quoted ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:153:14: ^( QPHRASE quoted ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QPHRASE, "QPHRASE") + , root_1); + + adaptor.addChild(root_1, stream_quoted.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 6 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:154:4: quoted_truncated + { + pushFollow(FOLLOW_quoted_truncated_in_value731); + quoted_truncated35=quoted_truncated(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_quoted_truncated.add(quoted_truncated35.getTree()); + + // AST REWRITE + // elements: quoted_truncated + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 154:21: -> ^( QPHRASETRUNC quoted_truncated ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:154:24: ^( QPHRASETRUNC quoted_truncated ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QPHRASETRUNC, "QPHRASETRUNC") + , root_1); + + adaptor.addChild(root_1, stream_quoted_truncated.nextTree()); + + adaptor.addChild(root_0, 
root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 7 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:155:4: QMARK + { + QMARK36=(Token)match(input,QMARK,FOLLOW_QMARK_in_value744); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_QMARK.add(QMARK36); + + + // AST REWRITE + // elements: QMARK + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 155:10: -> ^( QTRUNCATED QMARK ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:155:13: ^( QTRUNCATED QMARK ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QTRUNCATED, "QTRUNCATED") + , root_1); + + adaptor.addChild(root_1, + stream_QMARK.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 8 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:156:4: STAR COLON b= STAR + { + STAR37=(Token)match(input,STAR,FOLLOW_STAR_in_value757); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_STAR.add(STAR37); + + + COLON38=(Token)match(input,COLON,FOLLOW_COLON_in_value759); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_COLON.add(COLON38); + + + b=(Token)match(input,STAR,FOLLOW_STAR_in_value763); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_STAR.add(b); + + + // AST REWRITE + // elements: b + // token labels: b + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleTokenStream stream_b=new RewriteRuleTokenStream(adaptor,"token b",b); + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 156:22: -> ^( QANYTHING $b) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:156:25: ^( QANYTHING $b) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QANYTHING, "QANYTHING") + , root_1); + + adaptor.addChild(root_1, stream_b.nextNode()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 9 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:157:5: STAR + { + STAR39=(Token)match(input,STAR,FOLLOW_STAR_in_value778); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_STAR.add(STAR39); + + + // AST REWRITE + // elements: STAR + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 157:10: -> ^( QTRUNCATED STAR ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:157:13: ^( QTRUNCATED STAR ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QTRUNCATED, "QTRUNCATED") + , root_1); + + 
adaptor.addChild(root_1, + stream_STAR.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "value" + + + public static class range_term_in_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "range_term_in" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:162:1: range_term_in : LBRACK (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? RBRACK ; + public final StandardLuceneGrammarParser.range_term_in_return range_term_in() throws RecognitionException { + StandardLuceneGrammarParser.range_term_in_return retval = new StandardLuceneGrammarParser.range_term_in_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token LBRACK40=null; + Token TO41=null; + Token RBRACK42=null; + StandardLuceneGrammarParser.range_value_return a =null; + + StandardLuceneGrammarParser.range_value_return b =null; + + + Object LBRACK40_tree=null; + Object TO41_tree=null; + Object RBRACK42_tree=null; + RewriteRuleTokenStream stream_RBRACK=new RewriteRuleTokenStream(adaptor,"token RBRACK"); + RewriteRuleTokenStream stream_LBRACK=new RewriteRuleTokenStream(adaptor,"token LBRACK"); + RewriteRuleTokenStream stream_TO=new RewriteRuleTokenStream(adaptor,"token TO"); + RewriteRuleSubtreeStream stream_range_value=new RewriteRuleSubtreeStream(adaptor,"rule range_value"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:163:2: ( LBRACK (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? RBRACK ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:164:8: LBRACK (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? 
RBRACK + { + LBRACK40=(Token)match(input,LBRACK,FOLLOW_LBRACK_in_range_term_in809); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LBRACK.add(LBRACK40); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:165:8: (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:165:9: a= range_value + { + pushFollow(FOLLOW_range_value_in_range_term_in821); + a=range_value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_value.add(a.getTree()); + + // AST REWRITE + // elements: range_value + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 165:23: -> range_value ^( QANYTHING QANYTHING[\"*\"] ) + { + adaptor.addChild(root_0, stream_range_value.nextTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:165:38: ^( QANYTHING QANYTHING[\"*\"] ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QANYTHING, "QANYTHING") + , root_1); + + adaptor.addChild(root_1, + (Object)adaptor.create(QANYTHING, "*") + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:166:8: ( ( TO )? b= range_value -> $a ( $b)? )? + int alt21=2; + int LA21_0 = input.LA(1); + + if ( (LA21_0==DATE_TOKEN||LA21_0==NUMBER||(LA21_0 >= PHRASE && LA21_0 <= PHRASE_ANYTHING)||LA21_0==STAR||LA21_0==TERM_NORMAL||LA21_0==TERM_TRUNCATED||LA21_0==TO) ) { + alt21=1; + } + switch (alt21) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:166:10: ( TO )? b= range_value + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:166:10: ( TO )? + int alt20=2; + int LA20_0 = input.LA(1); + + if ( (LA20_0==TO) ) { + alt20=1; + } + switch (alt20) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:166:10: TO + { + TO41=(Token)match(input,TO,FOLLOW_TO_in_range_term_in844); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TO.add(TO41); + + + } + break; + + } + + + pushFollow(FOLLOW_range_value_in_range_term_in849); + b=range_value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_value.add(b.getTree()); + + // AST REWRITE + // elements: a, b + // token labels: + // rule labels: retval, b, a + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_b=new RewriteRuleSubtreeStream(adaptor,"rule b",b!=null?b.tree:null); + RewriteRuleSubtreeStream stream_a=new RewriteRuleSubtreeStream(adaptor,"rule a",a!=null?a.tree:null); + + root_0 = (Object)adaptor.nil(); + // 166:28: -> $a ( $b)? + { + adaptor.addChild(root_0, stream_a.nextTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:166:35: ( $b)? 
+ if ( stream_b.hasNext() ) { + adaptor.addChild(root_0, stream_b.nextTree()); + + } + stream_b.reset(); + + } + + + retval.tree = root_0; + } + + } + break; + + } + + + RBRACK42=(Token)match(input,RBRACK,FOLLOW_RBRACK_in_range_term_in870); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RBRACK.add(RBRACK42); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "range_term_in" + + + public static class range_term_ex_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "range_term_ex" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:171:1: range_term_ex : LCURLY (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? RCURLY ; + public final StandardLuceneGrammarParser.range_term_ex_return range_term_ex() throws RecognitionException { + StandardLuceneGrammarParser.range_term_ex_return retval = new StandardLuceneGrammarParser.range_term_ex_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token LCURLY43=null; + Token TO44=null; + Token RCURLY45=null; + StandardLuceneGrammarParser.range_value_return a =null; + + StandardLuceneGrammarParser.range_value_return b =null; + + + Object LCURLY43_tree=null; + Object TO44_tree=null; + Object RCURLY45_tree=null; + RewriteRuleTokenStream stream_LCURLY=new RewriteRuleTokenStream(adaptor,"token LCURLY"); + RewriteRuleTokenStream stream_TO=new RewriteRuleTokenStream(adaptor,"token TO"); + RewriteRuleTokenStream stream_RCURLY=new RewriteRuleTokenStream(adaptor,"token RCURLY"); + RewriteRuleSubtreeStream stream_range_value=new RewriteRuleSubtreeStream(adaptor,"rule range_value"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:172:2: ( LCURLY (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? RCURLY ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:173:8: LCURLY (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? 
RCURLY + { + LCURLY43=(Token)match(input,LCURLY,FOLLOW_LCURLY_in_range_term_ex890); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LCURLY.add(LCURLY43); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:174:8: (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:174:10: a= range_value + { + pushFollow(FOLLOW_range_value_in_range_term_ex903); + a=range_value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_value.add(a.getTree()); + + // AST REWRITE + // elements: range_value + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 174:24: -> range_value ^( QANYTHING QANYTHING[\"*\"] ) + { + adaptor.addChild(root_0, stream_range_value.nextTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:174:39: ^( QANYTHING QANYTHING[\"*\"] ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QANYTHING, "QANYTHING") + , root_1); + + adaptor.addChild(root_1, + (Object)adaptor.create(QANYTHING, "*") + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:175:8: ( ( TO )? b= range_value -> $a ( $b)? )? + int alt23=2; + int LA23_0 = input.LA(1); + + if ( (LA23_0==DATE_TOKEN||LA23_0==NUMBER||(LA23_0 >= PHRASE && LA23_0 <= PHRASE_ANYTHING)||LA23_0==STAR||LA23_0==TERM_NORMAL||LA23_0==TERM_TRUNCATED||LA23_0==TO) ) { + alt23=1; + } + switch (alt23) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:175:10: ( TO )? b= range_value + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:175:10: ( TO )? + int alt22=2; + int LA22_0 = input.LA(1); + + if ( (LA22_0==TO) ) { + alt22=1; + } + switch (alt22) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:175:10: TO + { + TO44=(Token)match(input,TO,FOLLOW_TO_in_range_term_ex926); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TO.add(TO44); + + + } + break; + + } + + + pushFollow(FOLLOW_range_value_in_range_term_ex931); + b=range_value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_value.add(b.getTree()); + + // AST REWRITE + // elements: b, a + // token labels: + // rule labels: retval, b, a + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_b=new RewriteRuleSubtreeStream(adaptor,"rule b",b!=null?b.tree:null); + RewriteRuleSubtreeStream stream_a=new RewriteRuleSubtreeStream(adaptor,"rule a",a!=null?a.tree:null); + + root_0 = (Object)adaptor.nil(); + // 175:28: -> $a ( $b)? + { + adaptor.addChild(root_0, stream_a.nextTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:175:35: ( $b)? 
+ if ( stream_b.hasNext() ) { + adaptor.addChild(root_0, stream_b.nextTree()); + + } + stream_b.reset(); + + } + + + retval.tree = root_0; + } + + } + break; + + } + + + RCURLY45=(Token)match(input,RCURLY,FOLLOW_RCURLY_in_range_term_ex952); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RCURLY.add(RCURLY45); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "range_term_ex" + + + public static class range_value_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "range_value" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:179:1: range_value : ( truncated -> ^( QTRUNCATED truncated ) | quoted -> ^( QPHRASE quoted ) | quoted_truncated -> ^( QPHRASETRUNC quoted_truncated ) | date -> ^( QNORMAL date ) | normal -> ^( QNORMAL normal ) | STAR -> ^( QANYTHING STAR ) ); + public final StandardLuceneGrammarParser.range_value_return range_value() throws RecognitionException { + StandardLuceneGrammarParser.range_value_return retval = new StandardLuceneGrammarParser.range_value_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token STAR51=null; + StandardLuceneGrammarParser.truncated_return truncated46 =null; + + StandardLuceneGrammarParser.quoted_return quoted47 =null; + + StandardLuceneGrammarParser.quoted_truncated_return quoted_truncated48 =null; + + StandardLuceneGrammarParser.date_return date49 =null; + + StandardLuceneGrammarParser.normal_return normal50 =null; + + + Object STAR51_tree=null; + RewriteRuleTokenStream stream_STAR=new RewriteRuleTokenStream(adaptor,"token STAR"); + RewriteRuleSubtreeStream stream_normal=new RewriteRuleSubtreeStream(adaptor,"rule normal"); + RewriteRuleSubtreeStream stream_quoted=new RewriteRuleSubtreeStream(adaptor,"rule quoted"); + RewriteRuleSubtreeStream stream_quoted_truncated=new RewriteRuleSubtreeStream(adaptor,"rule quoted_truncated"); + RewriteRuleSubtreeStream stream_truncated=new RewriteRuleSubtreeStream(adaptor,"rule truncated"); + RewriteRuleSubtreeStream stream_date=new RewriteRuleSubtreeStream(adaptor,"rule date"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:180:2: ( truncated -> ^( QTRUNCATED truncated ) | quoted -> ^( QPHRASE quoted ) | quoted_truncated -> ^( QPHRASETRUNC quoted_truncated ) | date -> ^( QNORMAL date ) | normal -> ^( QNORMAL normal ) | STAR -> ^( QANYTHING STAR ) ) + int alt24=6; + switch ( input.LA(1) ) { + case TERM_TRUNCATED: + { + alt24=1; + } + break; + case PHRASE: + { + alt24=2; + } + break; + case PHRASE_ANYTHING: + { + alt24=3; + } + break; + case DATE_TOKEN: + { + alt24=4; + } + break; + case NUMBER: + case TERM_NORMAL: + { + alt24=5; + } + break; + case STAR: + { + alt24=6; + } + break; + default: + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 24, 0, input); + + throw nvae; + + } + + switch (alt24) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:181:2: truncated + { + 
pushFollow(FOLLOW_truncated_in_range_value966); + truncated46=truncated(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_truncated.add(truncated46.getTree()); + + // AST REWRITE + // elements: truncated + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 181:12: -> ^( QTRUNCATED truncated ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:181:15: ^( QTRUNCATED truncated ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QTRUNCATED, "QTRUNCATED") + , root_1); + + adaptor.addChild(root_1, stream_truncated.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:182:4: quoted + { + pushFollow(FOLLOW_quoted_in_range_value979); + quoted47=quoted(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_quoted.add(quoted47.getTree()); + + // AST REWRITE + // elements: quoted + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 182:11: -> ^( QPHRASE quoted ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:182:14: ^( QPHRASE quoted ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QPHRASE, "QPHRASE") + , root_1); + + adaptor.addChild(root_1, stream_quoted.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:183:4: quoted_truncated + { + pushFollow(FOLLOW_quoted_truncated_in_range_value992); + quoted_truncated48=quoted_truncated(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_quoted_truncated.add(quoted_truncated48.getTree()); + + // AST REWRITE + // elements: quoted_truncated + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 183:21: -> ^( QPHRASETRUNC quoted_truncated ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:183:24: ^( QPHRASETRUNC quoted_truncated ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QPHRASETRUNC, "QPHRASETRUNC") + , root_1); + + adaptor.addChild(root_1, stream_quoted_truncated.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 4 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:184:4: date + { + pushFollow(FOLLOW_date_in_range_value1005); 
+ date49=date(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_date.add(date49.getTree()); + + // AST REWRITE + // elements: date + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 184:9: -> ^( QNORMAL date ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:184:12: ^( QNORMAL date ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QNORMAL, "QNORMAL") + , root_1); + + adaptor.addChild(root_1, stream_date.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 5 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:185:4: normal + { + pushFollow(FOLLOW_normal_in_range_value1018); + normal50=normal(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_normal.add(normal50.getTree()); + + // AST REWRITE + // elements: normal + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 185:11: -> ^( QNORMAL normal ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:185:14: ^( QNORMAL normal ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QNORMAL, "QNORMAL") + , root_1); + + adaptor.addChild(root_1, stream_normal.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 6 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:186:4: STAR + { + STAR51=(Token)match(input,STAR,FOLLOW_STAR_in_range_value1032); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_STAR.add(STAR51); + + + // AST REWRITE + // elements: STAR + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 186:9: -> ^( QANYTHING STAR ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:186:12: ^( QANYTHING STAR ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QANYTHING, "QANYTHING") + , root_1); + + adaptor.addChild(root_1, + stream_STAR.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for 
sure before leaving + } + return retval; + } + // $ANTLR end "range_value" + + + public static class multi_value_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multi_value" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:189:1: multi_value : LPAREN multiClause RPAREN -> multiClause ; + public final StandardLuceneGrammarParser.multi_value_return multi_value() throws RecognitionException { + StandardLuceneGrammarParser.multi_value_return retval = new StandardLuceneGrammarParser.multi_value_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token LPAREN52=null; + Token RPAREN54=null; + StandardLuceneGrammarParser.multiClause_return multiClause53 =null; + + + Object LPAREN52_tree=null; + Object RPAREN54_tree=null; + RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN"); + RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN"); + RewriteRuleSubtreeStream stream_multiClause=new RewriteRuleSubtreeStream(adaptor,"rule multiClause"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:190:2: ( LPAREN multiClause RPAREN -> multiClause ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:191:2: LPAREN multiClause RPAREN + { + LPAREN52=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_multi_value1053); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN52); + + + pushFollow(FOLLOW_multiClause_in_multi_value1055); + multiClause53=multiClause(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiClause.add(multiClause53.getTree()); + + RPAREN54=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_multi_value1057); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN54); + + + // AST REWRITE + // elements: multiClause + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 191:28: -> multiClause + { + adaptor.addChild(root_0, stream_multiClause.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multi_value" + + + public static class multiClause_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiClause" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:196:1: multiClause : ( clauseOr )+ -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ; + public final StandardLuceneGrammarParser.multiClause_return multiClause() throws RecognitionException { + StandardLuceneGrammarParser.multiClause_return retval = new StandardLuceneGrammarParser.multiClause_return(); + retval.start = input.LT(1); + + + Object root_0 = null; 
+ + StandardLuceneGrammarParser.clauseOr_return clauseOr55 =null; + + + RewriteRuleSubtreeStream stream_clauseOr=new RewriteRuleSubtreeStream(adaptor,"rule clauseOr"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:197:2: ( ( clauseOr )+ -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:202:2: ( clauseOr )+ + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:202:2: ( clauseOr )+ + int cnt25=0; + loop25: + do { + int alt25=2; + int LA25_0 = input.LA(1); + + if ( ((LA25_0 >= LBRACK && LA25_0 <= MINUS)||LA25_0==NUMBER||(LA25_0 >= PHRASE && LA25_0 <= PLUS)||LA25_0==QMARK||LA25_0==STAR||LA25_0==TERM_NORMAL||LA25_0==TERM_TRUNCATED) ) { + alt25=1; + } + + + switch (alt25) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:202:2: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_multiClause1084); + clauseOr55=clauseOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseOr.add(clauseOr55.getTree()); + + } + break; + + default : + if ( cnt25 >= 1 ) break loop25; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(25, input); + throw eee; + } + cnt25++; + } while (true); + + + // AST REWRITE + // elements: clauseOr + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 202:12: -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:202:15: ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "DEFOP") + , root_1); + + if ( !(stream_clauseOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseOr.hasNext() ) { + adaptor.addChild(root_1, stream_clauseOr.nextTree()); + + } + stream_clauseOr.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiClause" + + + public static class multiDefault_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiDefault" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:215:1: multiDefault : ( multiOr )+ -> ^( OPERATOR[\"DEFOP\"] ( multiOr )+ ) ; + public final StandardLuceneGrammarParser.multiDefault_return multiDefault() throws RecognitionException { + StandardLuceneGrammarParser.multiDefault_return retval = new StandardLuceneGrammarParser.multiDefault_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + StandardLuceneGrammarParser.multiOr_return multiOr56 =null; + + + 
RewriteRuleSubtreeStream stream_multiOr=new RewriteRuleSubtreeStream(adaptor,"rule multiOr"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:216:2: ( ( multiOr )+ -> ^( OPERATOR[\"DEFOP\"] ( multiOr )+ ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:217:2: ( multiOr )+ + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:217:2: ( multiOr )+ + int cnt26=0; + loop26: + do { + int alt26=2; + int LA26_0 = input.LA(1); + + if ( ((LA26_0 >= LBRACK && LA26_0 <= LCURLY)||LA26_0==MINUS||LA26_0==NUMBER||(LA26_0 >= PHRASE && LA26_0 <= PLUS)||LA26_0==QMARK||LA26_0==STAR||LA26_0==TERM_NORMAL||LA26_0==TERM_TRUNCATED) ) { + alt26=1; + } + + + switch (alt26) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:217:2: multiOr + { + pushFollow(FOLLOW_multiOr_in_multiDefault1128); + multiOr56=multiOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiOr.add(multiOr56.getTree()); + + } + break; + + default : + if ( cnt26 >= 1 ) break loop26; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(26, input); + throw eee; + } + cnt26++; + } while (true); + + + // AST REWRITE + // elements: multiOr + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 217:11: -> ^( OPERATOR[\"DEFOP\"] ( multiOr )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:217:14: ^( OPERATOR[\"DEFOP\"] ( multiOr )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "DEFOP") + , root_1); + + if ( !(stream_multiOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_multiOr.hasNext() ) { + adaptor.addChild(root_1, stream_multiOr.nextTree()); + + } + stream_multiOr.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiDefault" + + + public static class multiOr_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiOr" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:220:1: multiOr : (first= multiAnd -> $first) ( or others= multiAnd -> ^( OPERATOR[\"OR\"] ( multiAnd )+ ) )* ; + public final StandardLuceneGrammarParser.multiOr_return multiOr() throws RecognitionException { + StandardLuceneGrammarParser.multiOr_return retval = new StandardLuceneGrammarParser.multiOr_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + StandardLuceneGrammarParser.multiAnd_return first =null; + + StandardLuceneGrammarParser.multiAnd_return others =null; + + 
StandardLuceneGrammarParser.or_return or57 =null; + + + RewriteRuleSubtreeStream stream_multiAnd=new RewriteRuleSubtreeStream(adaptor,"rule multiAnd"); + RewriteRuleSubtreeStream stream_or=new RewriteRuleSubtreeStream(adaptor,"rule or"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:221:2: ( (first= multiAnd -> $first) ( or others= multiAnd -> ^( OPERATOR[\"OR\"] ( multiAnd )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:222:2: (first= multiAnd -> $first) ( or others= multiAnd -> ^( OPERATOR[\"OR\"] ( multiAnd )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:222:2: (first= multiAnd -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:222:3: first= multiAnd + { + pushFollow(FOLLOW_multiAnd_in_multiOr1156); + first=multiAnd(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiAnd.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 222:19: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:222:30: ( or others= multiAnd -> ^( OPERATOR[\"OR\"] ( multiAnd )+ ) )* + loop27: + do { + int alt27=2; + int LA27_0 = input.LA(1); + + if ( (LA27_0==OR) ) { + alt27=1; + } + + + switch (alt27) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:222:31: or others= multiAnd + { + pushFollow(FOLLOW_or_in_multiOr1166); + or57=or(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_or.add(or57.getTree()); + + pushFollow(FOLLOW_multiAnd_in_multiOr1170); + others=multiAnd(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiAnd.add(others.getTree()); + + // AST REWRITE + // elements: multiAnd + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 222:49: -> ^( OPERATOR[\"OR\"] ( multiAnd )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:222:52: ^( OPERATOR[\"OR\"] ( multiAnd )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "OR") + , root_1); + + if ( !(stream_multiAnd.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_multiAnd.hasNext() ) { + adaptor.addChild(root_1, stream_multiAnd.nextTree()); + + } + stream_multiAnd.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop27; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { 
+ + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiOr" + + + public static class multiAnd_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiAnd" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:225:1: multiAnd : (first= multiNot -> $first) ( and others= multiNot -> ^( OPERATOR[\"AND\"] ( multiNot )+ ) )* ; + public final StandardLuceneGrammarParser.multiAnd_return multiAnd() throws RecognitionException { + StandardLuceneGrammarParser.multiAnd_return retval = new StandardLuceneGrammarParser.multiAnd_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + StandardLuceneGrammarParser.multiNot_return first =null; + + StandardLuceneGrammarParser.multiNot_return others =null; + + StandardLuceneGrammarParser.and_return and58 =null; + + + RewriteRuleSubtreeStream stream_multiNot=new RewriteRuleSubtreeStream(adaptor,"rule multiNot"); + RewriteRuleSubtreeStream stream_and=new RewriteRuleSubtreeStream(adaptor,"rule and"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:226:2: ( (first= multiNot -> $first) ( and others= multiNot -> ^( OPERATOR[\"AND\"] ( multiNot )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:227:2: (first= multiNot -> $first) ( and others= multiNot -> ^( OPERATOR[\"AND\"] ( multiNot )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:227:2: (first= multiNot -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:227:3: first= multiNot + { + pushFollow(FOLLOW_multiNot_in_multiAnd1201); + first=multiNot(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiNot.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 227:19: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:227:30: ( and others= multiNot -> ^( OPERATOR[\"AND\"] ( multiNot )+ ) )* + loop28: + do { + int alt28=2; + int LA28_0 = input.LA(1); + + if ( (LA28_0==AND) ) { + alt28=1; + } + + + switch (alt28) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:227:31: and others= multiNot + { + pushFollow(FOLLOW_and_in_multiAnd1211); + and58=and(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_and.add(and58.getTree()); + + pushFollow(FOLLOW_multiNot_in_multiAnd1215); + others=multiNot(); + + state._fsp--; + if (state.failed) return retval; + if ( 
state.backtracking==0 ) stream_multiNot.add(others.getTree()); + + // AST REWRITE + // elements: multiNot + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 227:51: -> ^( OPERATOR[\"AND\"] ( multiNot )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:227:54: ^( OPERATOR[\"AND\"] ( multiNot )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "AND") + , root_1); + + if ( !(stream_multiNot.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_multiNot.hasNext() ) { + adaptor.addChild(root_1, stream_multiNot.nextTree()); + + } + stream_multiNot.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop28; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiAnd" + + + public static class multiNot_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiNot" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:230:1: multiNot : (first= multiBasic -> $first) ( not others= multiBasic -> ^( not ( multiBasic )+ ) )* ; + public final StandardLuceneGrammarParser.multiNot_return multiNot() throws RecognitionException { + StandardLuceneGrammarParser.multiNot_return retval = new StandardLuceneGrammarParser.multiNot_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + StandardLuceneGrammarParser.multiBasic_return first =null; + + StandardLuceneGrammarParser.multiBasic_return others =null; + + StandardLuceneGrammarParser.not_return not59 =null; + + + RewriteRuleSubtreeStream stream_not=new RewriteRuleSubtreeStream(adaptor,"rule not"); + RewriteRuleSubtreeStream stream_multiBasic=new RewriteRuleSubtreeStream(adaptor,"rule multiBasic"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:231:2: ( (first= multiBasic -> $first) ( not others= multiBasic -> ^( not ( multiBasic )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:232:2: (first= multiBasic -> $first) ( not others= multiBasic -> ^( not ( multiBasic )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:232:2: (first= multiBasic -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:232:3: first= multiBasic + { + pushFollow(FOLLOW_multiBasic_in_multiNot1246); + first=multiBasic(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiBasic.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + 
if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 232:21: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:232:32: ( not others= multiBasic -> ^( not ( multiBasic )+ ) )* + loop29: + do { + int alt29=2; + int LA29_0 = input.LA(1); + + if ( (LA29_0==AND) ) { + int LA29_1 = input.LA(2); + + if ( (LA29_1==NOT) ) { + alt29=1; + } + + + } + else if ( (LA29_0==NOT) ) { + alt29=1; + } + + + switch (alt29) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:232:33: not others= multiBasic + { + pushFollow(FOLLOW_not_in_multiNot1256); + not59=not(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_not.add(not59.getTree()); + + pushFollow(FOLLOW_multiBasic_in_multiNot1260); + others=multiBasic(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiBasic.add(others.getTree()); + + // AST REWRITE + // elements: not, multiBasic + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 232:54: -> ^( not ( multiBasic )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:232:57: ^( not ( multiBasic )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot(stream_not.nextNode(), root_1); + + if ( !(stream_multiBasic.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_multiBasic.hasNext() ) { + adaptor.addChild(root_1, stream_multiBasic.nextTree()); + + } + stream_multiBasic.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop29; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiNot" + + + public static class multiBasic_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiBasic" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:237:1: multiBasic : mterm ; + public final StandardLuceneGrammarParser.multiBasic_return multiBasic() throws RecognitionException { + StandardLuceneGrammarParser.multiBasic_return retval = new StandardLuceneGrammarParser.multiBasic_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + StandardLuceneGrammarParser.mterm_return mterm60 =null; + + + + try { + // 
/dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:238:2: ( mterm ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:239:2: mterm + { + root_0 = (Object)adaptor.nil(); + + + pushFollow(FOLLOW_mterm_in_multiBasic1287); + mterm60=mterm(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) adaptor.addChild(root_0, mterm60.getTree()); + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiBasic" + + + public static class mterm_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "mterm" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:242:1: mterm : ( modifier )? value -> ^( MODIFIER ( modifier )? value ) ; + public final StandardLuceneGrammarParser.mterm_return mterm() throws RecognitionException { + StandardLuceneGrammarParser.mterm_return retval = new StandardLuceneGrammarParser.mterm_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + StandardLuceneGrammarParser.modifier_return modifier61 =null; + + StandardLuceneGrammarParser.value_return value62 =null; + + + RewriteRuleSubtreeStream stream_modifier=new RewriteRuleSubtreeStream(adaptor,"rule modifier"); + RewriteRuleSubtreeStream stream_value=new RewriteRuleSubtreeStream(adaptor,"rule value"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:243:2: ( ( modifier )? value -> ^( MODIFIER ( modifier )? value ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:244:2: ( modifier )? value + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:244:2: ( modifier )? + int alt30=2; + int LA30_0 = input.LA(1); + + if ( (LA30_0==MINUS||LA30_0==PLUS) ) { + alt30=1; + } + switch (alt30) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:244:2: modifier + { + pushFollow(FOLLOW_modifier_in_mterm1303); + modifier61=modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_modifier.add(modifier61.getTree()); + + } + break; + + } + + + pushFollow(FOLLOW_value_in_mterm1306); + value62=value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_value.add(value62.getTree()); + + // AST REWRITE + // elements: modifier, value + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 244:18: -> ^( MODIFIER ( modifier )? value ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:244:21: ^( MODIFIER ( modifier )? 
value ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(MODIFIER, "MODIFIER") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:244:32: ( modifier )? + if ( stream_modifier.hasNext() ) { + adaptor.addChild(root_1, stream_modifier.nextTree()); + + } + stream_modifier.reset(); + + adaptor.addChild(root_1, stream_value.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "mterm" + + + public static class normal_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "normal" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:248:1: normal : ( TERM_NORMAL | NUMBER ); + public final StandardLuceneGrammarParser.normal_return normal() throws RecognitionException { + StandardLuceneGrammarParser.normal_return retval = new StandardLuceneGrammarParser.normal_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token set63=null; + + Object set63_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:249:2: ( TERM_NORMAL | NUMBER ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + root_0 = (Object)adaptor.nil(); + + + set63=(Token)input.LT(1); + + if ( input.LA(1)==NUMBER||input.LA(1)==TERM_NORMAL ) { + input.consume(); + if ( state.backtracking==0 ) adaptor.addChild(root_0, + (Object)adaptor.create(set63) + ); + state.errorRecovery=false; + state.failed=false; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + MismatchedSetException mse = new MismatchedSetException(null,input); + throw mse; + } + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "normal" + + + public static class truncated_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "truncated" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:257:1: truncated : TERM_TRUNCATED ; + public final StandardLuceneGrammarParser.truncated_return truncated() throws RecognitionException { + StandardLuceneGrammarParser.truncated_return retval = new StandardLuceneGrammarParser.truncated_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token TERM_TRUNCATED64=null; + + Object TERM_TRUNCATED64_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:258:2: ( TERM_TRUNCATED ) + // 
/dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:259:2: TERM_TRUNCATED + { + root_0 = (Object)adaptor.nil(); + + + TERM_TRUNCATED64=(Token)match(input,TERM_TRUNCATED,FOLLOW_TERM_TRUNCATED_in_truncated1359); if (state.failed) return retval; + if ( state.backtracking==0 ) { + TERM_TRUNCATED64_tree = + (Object)adaptor.create(TERM_TRUNCATED64) + ; + adaptor.addChild(root_0, TERM_TRUNCATED64_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "truncated" + + + public static class quoted_truncated_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "quoted_truncated" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:263:1: quoted_truncated : PHRASE_ANYTHING ; + public final StandardLuceneGrammarParser.quoted_truncated_return quoted_truncated() throws RecognitionException { + StandardLuceneGrammarParser.quoted_truncated_return retval = new StandardLuceneGrammarParser.quoted_truncated_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token PHRASE_ANYTHING65=null; + + Object PHRASE_ANYTHING65_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:264:2: ( PHRASE_ANYTHING ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:265:2: PHRASE_ANYTHING + { + root_0 = (Object)adaptor.nil(); + + + PHRASE_ANYTHING65=(Token)match(input,PHRASE_ANYTHING,FOLLOW_PHRASE_ANYTHING_in_quoted_truncated1374); if (state.failed) return retval; + if ( state.backtracking==0 ) { + PHRASE_ANYTHING65_tree = + (Object)adaptor.create(PHRASE_ANYTHING65) + ; + adaptor.addChild(root_0, PHRASE_ANYTHING65_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "quoted_truncated" + + + public static class quoted_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "quoted" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:268:1: quoted : PHRASE ; + public final StandardLuceneGrammarParser.quoted_return quoted() throws RecognitionException { + StandardLuceneGrammarParser.quoted_return retval = new StandardLuceneGrammarParser.quoted_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token PHRASE66=null; + + Object PHRASE66_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:268:8: ( PHRASE ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:269:2: PHRASE + { + root_0 = (Object)adaptor.nil(); + + + PHRASE66=(Token)match(input,PHRASE,FOLLOW_PHRASE_in_quoted1386); if 
(state.failed) return retval; + if ( state.backtracking==0 ) { + PHRASE66_tree = + (Object)adaptor.create(PHRASE66) + ; + adaptor.addChild(root_0, PHRASE66_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "quoted" + + + public static class operator_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "operator" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:275:1: operator : ( AND -> OPERATOR[\"AND\"] | OR -> OPERATOR[\"OR\"] | NOT -> OPERATOR[\"NOT\"] ) ; + public final StandardLuceneGrammarParser.operator_return operator() throws RecognitionException { + StandardLuceneGrammarParser.operator_return retval = new StandardLuceneGrammarParser.operator_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token AND67=null; + Token OR68=null; + Token NOT69=null; + + Object AND67_tree=null; + Object OR68_tree=null; + Object NOT69_tree=null; + RewriteRuleTokenStream stream_NOT=new RewriteRuleTokenStream(adaptor,"token NOT"); + RewriteRuleTokenStream stream_AND=new RewriteRuleTokenStream(adaptor,"token AND"); + RewriteRuleTokenStream stream_OR=new RewriteRuleTokenStream(adaptor,"token OR"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:275:9: ( ( AND -> OPERATOR[\"AND\"] | OR -> OPERATOR[\"OR\"] | NOT -> OPERATOR[\"NOT\"] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:275:11: ( AND -> OPERATOR[\"AND\"] | OR -> OPERATOR[\"OR\"] | NOT -> OPERATOR[\"NOT\"] ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:275:11: ( AND -> OPERATOR[\"AND\"] | OR -> OPERATOR[\"OR\"] | NOT -> OPERATOR[\"NOT\"] ) + int alt31=3; + switch ( input.LA(1) ) { + case AND: + { + alt31=1; + } + break; + case OR: + { + alt31=2; + } + break; + case NOT: + { + alt31=3; + } + break; + default: + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 31, 0, input); + + throw nvae; + + } + + switch (alt31) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:276:2: AND + { + AND67=(Token)match(input,AND,FOLLOW_AND_in_operator1402); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_AND.add(AND67); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 276:6: -> OPERATOR[\"AND\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(OPERATOR, "AND") + ); + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:277:4: OR + { + OR68=(Token)match(input,OR,FOLLOW_OR_in_operator1412); if (state.failed) return retval; + if ( state.backtracking==0 ) 
stream_OR.add(OR68); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 277:7: -> OPERATOR[\"OR\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(OPERATOR, "OR") + ); + + } + + + retval.tree = root_0; + } + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:278:4: NOT + { + NOT69=(Token)match(input,NOT,FOLLOW_NOT_in_operator1422); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_NOT.add(NOT69); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 278:8: -> OPERATOR[\"NOT\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(OPERATOR, "NOT") + ); + + } + + + retval.tree = root_0; + } + + } + break; + + } + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "operator" + + + public static class modifier_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "modifier" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:281:1: modifier : ( PLUS -> PLUS[\"+\"] | MINUS -> MINUS[\"-\"] ); + public final StandardLuceneGrammarParser.modifier_return modifier() throws RecognitionException { + StandardLuceneGrammarParser.modifier_return retval = new StandardLuceneGrammarParser.modifier_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token PLUS70=null; + Token MINUS71=null; + + Object PLUS70_tree=null; + Object MINUS71_tree=null; + RewriteRuleTokenStream stream_PLUS=new RewriteRuleTokenStream(adaptor,"token PLUS"); + RewriteRuleTokenStream stream_MINUS=new RewriteRuleTokenStream(adaptor,"token MINUS"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:281:9: ( PLUS -> PLUS[\"+\"] | MINUS -> MINUS[\"-\"] ) + int alt32=2; + int LA32_0 = input.LA(1); + + if ( (LA32_0==PLUS) ) { + alt32=1; + } + else if ( (LA32_0==MINUS) ) { + alt32=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 32, 0, input); + + throw nvae; + + } + switch (alt32) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:282:2: PLUS + { + PLUS70=(Token)match(input,PLUS,FOLLOW_PLUS_in_modifier1439); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_PLUS.add(PLUS70); + + + // AST REWRITE + // elements: PLUS + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // 
wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 282:7: -> PLUS[\"+\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(PLUS, "+") + ); + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:283:4: MINUS + { + MINUS71=(Token)match(input,MINUS,FOLLOW_MINUS_in_modifier1449); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_MINUS.add(MINUS71); + + + // AST REWRITE + // elements: MINUS + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 283:10: -> MINUS[\"-\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(MINUS, "-") + ); + + } + + + retval.tree = root_0; + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "modifier" + + + public static class term_modifier_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "term_modifier" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:295:1: term_modifier : ( TILDE ( CARAT )? -> ^( BOOST ( CARAT )? ) ^( FUZZY TILDE ) | CARAT ( TILDE )? -> ^( BOOST CARAT ) ^( FUZZY ( TILDE )? ) ); + public final StandardLuceneGrammarParser.term_modifier_return term_modifier() throws RecognitionException { + StandardLuceneGrammarParser.term_modifier_return retval = new StandardLuceneGrammarParser.term_modifier_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token TILDE72=null; + Token CARAT73=null; + Token CARAT74=null; + Token TILDE75=null; + + Object TILDE72_tree=null; + Object CARAT73_tree=null; + Object CARAT74_tree=null; + Object TILDE75_tree=null; + RewriteRuleTokenStream stream_CARAT=new RewriteRuleTokenStream(adaptor,"token CARAT"); + RewriteRuleTokenStream stream_TILDE=new RewriteRuleTokenStream(adaptor,"token TILDE"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:295:15: ( TILDE ( CARAT )? -> ^( BOOST ( CARAT )? ) ^( FUZZY TILDE ) | CARAT ( TILDE )? -> ^( BOOST CARAT ) ^( FUZZY ( TILDE )? ) ) + int alt35=2; + int LA35_0 = input.LA(1); + + if ( (LA35_0==TILDE) ) { + alt35=1; + } + else if ( (LA35_0==CARAT) ) { + alt35=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 35, 0, input); + + throw nvae; + + } + switch (alt35) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:296:2: TILDE ( CARAT )? 
+ { + TILDE72=(Token)match(input,TILDE,FOLLOW_TILDE_in_term_modifier1467); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TILDE.add(TILDE72); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:296:8: ( CARAT )? + int alt33=2; + int LA33_0 = input.LA(1); + + if ( (LA33_0==CARAT) ) { + alt33=1; + } + switch (alt33) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:296:8: CARAT + { + CARAT73=(Token)match(input,CARAT,FOLLOW_CARAT_in_term_modifier1469); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_CARAT.add(CARAT73); + + + } + break; + + } + + + // AST REWRITE + // elements: CARAT, TILDE + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 296:15: -> ^( BOOST ( CARAT )? ) ^( FUZZY TILDE ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:296:18: ^( BOOST ( CARAT )? ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(BOOST, "BOOST") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:296:26: ( CARAT )? + if ( stream_CARAT.hasNext() ) { + adaptor.addChild(root_1, + stream_CARAT.nextNode() + ); + + } + stream_CARAT.reset(); + + adaptor.addChild(root_0, root_1); + } + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:296:34: ^( FUZZY TILDE ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FUZZY, "FUZZY") + , root_1); + + adaptor.addChild(root_1, + stream_TILDE.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:297:4: CARAT ( TILDE )? + { + CARAT74=(Token)match(input,CARAT,FOLLOW_CARAT_in_term_modifier1491); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_CARAT.add(CARAT74); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:297:10: ( TILDE )? + int alt34=2; + int LA34_0 = input.LA(1); + + if ( (LA34_0==TILDE) ) { + alt34=1; + } + switch (alt34) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:297:10: TILDE + { + TILDE75=(Token)match(input,TILDE,FOLLOW_TILDE_in_term_modifier1493); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TILDE.add(TILDE75); + + + } + break; + + } + + + // AST REWRITE + // elements: CARAT, TILDE + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 297:17: -> ^( BOOST CARAT ) ^( FUZZY ( TILDE )? 
) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:297:20: ^( BOOST CARAT ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(BOOST, "BOOST") + , root_1); + + adaptor.addChild(root_1, + stream_CARAT.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:297:35: ^( FUZZY ( TILDE )? ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FUZZY, "FUZZY") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:297:43: ( TILDE )? + if ( stream_TILDE.hasNext() ) { + adaptor.addChild(root_1, + stream_TILDE.nextNode() + ); + + } + stream_TILDE.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "term_modifier" + + + public static class boost_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "boost" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:317:1: boost : ( CARAT -> ^( BOOST NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( BOOST NUMBER ) )? ; + public final StandardLuceneGrammarParser.boost_return boost() throws RecognitionException { + StandardLuceneGrammarParser.boost_return retval = new StandardLuceneGrammarParser.boost_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token CARAT76=null; + Token NUMBER77=null; + + Object CARAT76_tree=null; + Object NUMBER77_tree=null; + RewriteRuleTokenStream stream_CARAT=new RewriteRuleTokenStream(adaptor,"token CARAT"); + RewriteRuleTokenStream stream_NUMBER=new RewriteRuleTokenStream(adaptor,"token NUMBER"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:317:7: ( ( CARAT -> ^( BOOST NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( BOOST NUMBER ) )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:318:2: ( CARAT -> ^( BOOST NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( BOOST NUMBER ) )? 
+ { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:318:2: ( CARAT -> ^( BOOST NUMBER[\"DEF\"] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:318:3: CARAT + { + CARAT76=(Token)match(input,CARAT,FOLLOW_CARAT_in_boost1525); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_CARAT.add(CARAT76); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 318:9: -> ^( BOOST NUMBER[\"DEF\"] ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:318:12: ^( BOOST NUMBER[\"DEF\"] ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(BOOST, "BOOST") + , root_1); + + adaptor.addChild(root_1, + (Object)adaptor.create(NUMBER, "DEF") + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:319:2: ( NUMBER -> ^( BOOST NUMBER ) )? + int alt36=2; + int LA36_0 = input.LA(1); + + if ( (LA36_0==NUMBER) ) { + alt36=1; + } + switch (alt36) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:319:3: NUMBER + { + NUMBER77=(Token)match(input,NUMBER,FOLLOW_NUMBER_in_boost1540); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_NUMBER.add(NUMBER77); + + + // AST REWRITE + // elements: NUMBER + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 319:10: -> ^( BOOST NUMBER ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:319:13: ^( BOOST NUMBER ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(BOOST, "BOOST") + , root_1); + + adaptor.addChild(root_1, + stream_NUMBER.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "boost" + + + public static class fuzzy_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "fuzzy" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:322:1: fuzzy : ( TILDE -> ^( FUZZY NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( FUZZY NUMBER ) )? 
; + public final StandardLuceneGrammarParser.fuzzy_return fuzzy() throws RecognitionException { + StandardLuceneGrammarParser.fuzzy_return retval = new StandardLuceneGrammarParser.fuzzy_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token TILDE78=null; + Token NUMBER79=null; + + Object TILDE78_tree=null; + Object NUMBER79_tree=null; + RewriteRuleTokenStream stream_TILDE=new RewriteRuleTokenStream(adaptor,"token TILDE"); + RewriteRuleTokenStream stream_NUMBER=new RewriteRuleTokenStream(adaptor,"token NUMBER"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:322:7: ( ( TILDE -> ^( FUZZY NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( FUZZY NUMBER ) )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:323:2: ( TILDE -> ^( FUZZY NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( FUZZY NUMBER ) )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:323:2: ( TILDE -> ^( FUZZY NUMBER[\"DEF\"] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:323:3: TILDE + { + TILDE78=(Token)match(input,TILDE,FOLLOW_TILDE_in_fuzzy1563); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TILDE.add(TILDE78); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 323:9: -> ^( FUZZY NUMBER[\"DEF\"] ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:323:12: ^( FUZZY NUMBER[\"DEF\"] ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FUZZY, "FUZZY") + , root_1); + + adaptor.addChild(root_1, + (Object)adaptor.create(NUMBER, "DEF") + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:324:2: ( NUMBER -> ^( FUZZY NUMBER ) )? 
+ int alt37=2; + int LA37_0 = input.LA(1); + + if ( (LA37_0==NUMBER) ) { + alt37=1; + } + switch (alt37) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:324:3: NUMBER + { + NUMBER79=(Token)match(input,NUMBER,FOLLOW_NUMBER_in_fuzzy1578); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_NUMBER.add(NUMBER79); + + + // AST REWRITE + // elements: NUMBER + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 324:10: -> ^( FUZZY NUMBER ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:324:13: ^( FUZZY NUMBER ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FUZZY, "FUZZY") + , root_1); + + adaptor.addChild(root_1, + stream_NUMBER.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "fuzzy" + + + public static class not_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "not" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:327:1: not : ( ( AND NOT )=> AND NOT | NOT ); + public final StandardLuceneGrammarParser.not_return not() throws RecognitionException { + StandardLuceneGrammarParser.not_return retval = new StandardLuceneGrammarParser.not_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token AND80=null; + Token NOT81=null; + Token NOT82=null; + + Object AND80_tree=null; + Object NOT81_tree=null; + Object NOT82_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:327:5: ( ( AND NOT )=> AND NOT | NOT ) + int alt38=2; + int LA38_0 = input.LA(1); + + if ( (LA38_0==AND) && (synpred4_StandardLuceneGrammar())) { + alt38=1; + } + else if ( (LA38_0==NOT) ) { + alt38=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 38, 0, input); + + throw nvae; + + } + switch (alt38) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:328:2: ( AND NOT )=> AND NOT + { + root_0 = (Object)adaptor.nil(); + + + AND80=(Token)match(input,AND,FOLLOW_AND_in_not1608); if (state.failed) return retval; + if ( state.backtracking==0 ) { + AND80_tree = + (Object)adaptor.create(AND80) + ; + adaptor.addChild(root_0, AND80_tree); + } + + NOT81=(Token)match(input,NOT,FOLLOW_NOT_in_not1610); if (state.failed) return retval; + if ( state.backtracking==0 ) { + NOT81_tree = + (Object)adaptor.create(NOT81) + ; + adaptor.addChild(root_0, NOT81_tree); + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:329:4: NOT + 
{ + root_0 = (Object)adaptor.nil(); + + + NOT82=(Token)match(input,NOT,FOLLOW_NOT_in_not1615); if (state.failed) return retval; + if ( state.backtracking==0 ) { + NOT82_tree = + (Object)adaptor.create(NOT82) + ; + adaptor.addChild(root_0, NOT82_tree); + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "not" + + + public static class and_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "and" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:332:1: and : AND ; + public final StandardLuceneGrammarParser.and_return and() throws RecognitionException { + StandardLuceneGrammarParser.and_return retval = new StandardLuceneGrammarParser.and_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token AND83=null; + + Object AND83_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:332:6: ( AND ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:333:2: AND + { + root_0 = (Object)adaptor.nil(); + + + AND83=(Token)match(input,AND,FOLLOW_AND_in_and1629); if (state.failed) return retval; + if ( state.backtracking==0 ) { + AND83_tree = + (Object)adaptor.create(AND83) + ; + adaptor.addChild(root_0, AND83_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "and" + + + public static class or_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "or" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:336:1: or : OR ; + public final StandardLuceneGrammarParser.or_return or() throws RecognitionException { + StandardLuceneGrammarParser.or_return retval = new StandardLuceneGrammarParser.or_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token OR84=null; + + Object OR84_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:336:5: ( OR ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:337:2: OR + { + root_0 = (Object)adaptor.nil(); + + + OR84=(Token)match(input,OR,FOLLOW_OR_in_or1643); if (state.failed) return retval; + if ( state.backtracking==0 ) { + OR84_tree = + (Object)adaptor.create(OR84) + ; + adaptor.addChild(root_0, OR84_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = 
(Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "or" + + + public static class date_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "date" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:341:1: date : DATE_TOKEN ; + public final StandardLuceneGrammarParser.date_return date() throws RecognitionException { + StandardLuceneGrammarParser.date_return retval = new StandardLuceneGrammarParser.date_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token DATE_TOKEN85=null; + + Object DATE_TOKEN85_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:341:6: ( DATE_TOKEN ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:343:2: DATE_TOKEN + { + root_0 = (Object)adaptor.nil(); + + + DATE_TOKEN85=(Token)match(input,DATE_TOKEN,FOLLOW_DATE_TOKEN_in_date1660); if (state.failed) return retval; + if ( state.backtracking==0 ) { + DATE_TOKEN85_tree = + (Object)adaptor.create(DATE_TOKEN85) + ; + adaptor.addChild(root_0, DATE_TOKEN85_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "date" + + // $ANTLR start synpred1_StandardLuceneGrammar + public final void synpred1_StandardLuceneGrammar_fragment() throws RecognitionException { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:2: ( modifier LPAREN ( clauseOr )+ RPAREN ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:3: modifier LPAREN ( clauseOr )+ RPAREN + { + pushFollow(FOLLOW_modifier_in_synpred1_StandardLuceneGrammar379); + modifier(); + + state._fsp--; + if (state.failed) return ; + + match(input,LPAREN,FOLLOW_LPAREN_in_synpred1_StandardLuceneGrammar381); if (state.failed) return ; + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:19: ( clauseOr )+ + int cnt39=0; + loop39: + do { + int alt39=2; + int LA39_0 = input.LA(1); + + if ( ((LA39_0 >= LBRACK && LA39_0 <= MINUS)||LA39_0==NUMBER||(LA39_0 >= PHRASE && LA39_0 <= PLUS)||LA39_0==QMARK||LA39_0==STAR||LA39_0==TERM_NORMAL||LA39_0==TERM_TRUNCATED) ) { + alt39=1; + } + + + switch (alt39) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:123:19: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_synpred1_StandardLuceneGrammar383); + clauseOr(); + + state._fsp--; + if (state.failed) return ; + + } + break; + + default : + if ( cnt39 >= 1 ) break loop39; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(39, input); + throw eee; + } + cnt39++; + } while (true); + + + match(input,RPAREN,FOLLOW_RPAREN_in_synpred1_StandardLuceneGrammar386); if (state.failed) return ; + + } + + } + // $ANTLR end synpred1_StandardLuceneGrammar + + // $ANTLR start synpred2_StandardLuceneGrammar + public final void synpred2_StandardLuceneGrammar_fragment() throws RecognitionException { + 
// /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:4: ( LPAREN ( clauseOr )+ RPAREN term_modifier ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:5: LPAREN ( clauseOr )+ RPAREN term_modifier + { + match(input,LPAREN,FOLLOW_LPAREN_in_synpred2_StandardLuceneGrammar440); if (state.failed) return ; + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:12: ( clauseOr )+ + int cnt40=0; + loop40: + do { + int alt40=2; + int LA40_0 = input.LA(1); + + if ( ((LA40_0 >= LBRACK && LA40_0 <= MINUS)||LA40_0==NUMBER||(LA40_0 >= PHRASE && LA40_0 <= PLUS)||LA40_0==QMARK||LA40_0==STAR||LA40_0==TERM_NORMAL||LA40_0==TERM_TRUNCATED) ) { + alt40=1; + } + + + switch (alt40) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:125:12: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_synpred2_StandardLuceneGrammar442); + clauseOr(); + + state._fsp--; + if (state.failed) return ; + + } + break; + + default : + if ( cnt40 >= 1 ) break loop40; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(40, input); + throw eee; + } + cnt40++; + } while (true); + + + match(input,RPAREN,FOLLOW_RPAREN_in_synpred2_StandardLuceneGrammar445); if (state.failed) return ; + + pushFollow(FOLLOW_term_modifier_in_synpred2_StandardLuceneGrammar447); + term_modifier(); + + state._fsp--; + if (state.failed) return ; + + } + + } + // $ANTLR end synpred2_StandardLuceneGrammar + + // $ANTLR start synpred3_StandardLuceneGrammar + public final void synpred3_StandardLuceneGrammar_fragment() throws RecognitionException { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:127:4: ( LPAREN ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:127:5: LPAREN + { + match(input,LPAREN,FOLLOW_LPAREN_in_synpred3_StandardLuceneGrammar500); if (state.failed) return ; + + } + + } + // $ANTLR end synpred3_StandardLuceneGrammar + + // $ANTLR start synpred4_StandardLuceneGrammar + public final void synpred4_StandardLuceneGrammar_fragment() throws RecognitionException { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:328:2: ( AND NOT ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:328:3: AND NOT + { + match(input,AND,FOLLOW_AND_in_synpred4_StandardLuceneGrammar1602); if (state.failed) return ; + + match(input,NOT,FOLLOW_NOT_in_synpred4_StandardLuceneGrammar1604); if (state.failed) return ; + + } + + } + // $ANTLR end synpred4_StandardLuceneGrammar + + // Delegated rules + + public final boolean synpred1_StandardLuceneGrammar() { + state.backtracking++; + int start = input.mark(); + try { + synpred1_StandardLuceneGrammar_fragment(); // can never throw exception + } catch (RecognitionException re) { + System.err.println("impossible: "+re); + } + boolean success = !state.failed; + input.rewind(start); + state.backtracking--; + state.failed=false; + return success; + } + public final boolean synpred4_StandardLuceneGrammar() { + state.backtracking++; + int start = input.mark(); + try { + synpred4_StandardLuceneGrammar_fragment(); // can never throw exception + } catch (RecognitionException re) { + System.err.println("impossible: "+re); + } + boolean success = !state.failed; + input.rewind(start); + state.backtracking--; + state.failed=false; + return success; + } + public final boolean synpred3_StandardLuceneGrammar() { + 
state.backtracking++; + int start = input.mark(); + try { + synpred3_StandardLuceneGrammar_fragment(); // can never throw exception + } catch (RecognitionException re) { + System.err.println("impossible: "+re); + } + boolean success = !state.failed; + input.rewind(start); + state.backtracking--; + state.failed=false; + return success; + } + public final boolean synpred2_StandardLuceneGrammar() { + state.backtracking++; + int start = input.mark(); + try { + synpred2_StandardLuceneGrammar_fragment(); // can never throw exception + } catch (RecognitionException re) { + System.err.println("impossible: "+re); + } + boolean success = !state.failed; + input.rewind(start); + state.backtracking--; + state.failed=false; + return success; + } + + + + + public static final BitSet FOLLOW_clauseOr_in_mainQ212 = new BitSet(new long[]{0x000054009C9E0000L}); + public static final BitSet FOLLOW_EOF_in_mainQ215 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_clauseAnd_in_clauseOr246 = new BitSet(new long[]{0x0000000002000002L}); + public static final BitSet FOLLOW_or_in_clauseOr255 = new BitSet(new long[]{0x000054009C9E0000L}); + public static final BitSet FOLLOW_clauseAnd_in_clauseOr259 = new BitSet(new long[]{0x0000000002000002L}); + public static final BitSet FOLLOW_clauseNot_in_clauseAnd288 = new BitSet(new long[]{0x0000000000000022L}); + public static final BitSet FOLLOW_and_in_clauseAnd298 = new BitSet(new long[]{0x000054009C9E0000L}); + public static final BitSet FOLLOW_clauseNot_in_clauseAnd302 = new BitSet(new long[]{0x0000000000000022L}); + public static final BitSet FOLLOW_clauseBasic_in_clauseNot333 = new BitSet(new long[]{0x0000000000400022L}); + public static final BitSet FOLLOW_not_in_clauseNot342 = new BitSet(new long[]{0x000054009C9E0000L}); + public static final BitSet FOLLOW_clauseBasic_in_clauseNot346 = new BitSet(new long[]{0x0000000000400022L}); + public static final BitSet FOLLOW_modifier_in_clauseBasic391 = new BitSet(new long[]{0x0000000000080000L}); + public static final BitSet FOLLOW_LPAREN_in_clauseBasic394 = new BitSet(new long[]{0x000054009C9E0000L}); + public static final BitSet FOLLOW_clauseOr_in_clauseBasic396 = new BitSet(new long[]{0x000055009C9E0000L}); + public static final BitSet FOLLOW_RPAREN_in_clauseBasic399 = new BitSet(new long[]{0x0000800000000102L}); + public static final BitSet FOLLOW_term_modifier_in_clauseBasic401 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_modifier_in_clauseBasic451 = new BitSet(new long[]{0x0000000000080000L}); + public static final BitSet FOLLOW_LPAREN_in_clauseBasic454 = new BitSet(new long[]{0x000054009C9E0000L}); + public static final BitSet FOLLOW_clauseOr_in_clauseBasic456 = new BitSet(new long[]{0x000055009C9E0000L}); + public static final BitSet FOLLOW_RPAREN_in_clauseBasic459 = new BitSet(new long[]{0x0000800000000102L}); + public static final BitSet FOLLOW_term_modifier_in_clauseBasic461 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LPAREN_in_clauseBasic506 = new BitSet(new long[]{0x000054009C9E0000L}); + public static final BitSet FOLLOW_clauseOr_in_clauseBasic508 = new BitSet(new long[]{0x000055009C9E0000L}); + public static final BitSet FOLLOW_RPAREN_in_clauseBasic511 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_atom_in_clauseBasic523 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_modifier_in_atom544 = new BitSet(new long[]{0x0000100000000000L}); + 
public static final BitSet FOLLOW_field_in_atom547 = new BitSet(new long[]{0x0000000000080000L}); + public static final BitSet FOLLOW_multi_value_in_atom549 = new BitSet(new long[]{0x0000800000000102L}); + public static final BitSet FOLLOW_term_modifier_in_atom551 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_modifier_in_atom587 = new BitSet(new long[]{0x000054008C860000L}); + public static final BitSet FOLLOW_field_in_atom590 = new BitSet(new long[]{0x000054008C860000L}); + public static final BitSet FOLLOW_value_in_atom593 = new BitSet(new long[]{0x0000800000000102L}); + public static final BitSet FOLLOW_term_modifier_in_atom595 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_TERM_NORMAL_in_field642 = new BitSet(new long[]{0x0000000000000400L}); + public static final BitSet FOLLOW_COLON_in_field644 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_range_term_in_in_value663 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_range_term_ex_in_value676 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_normal_in_value690 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_truncated_in_value704 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_quoted_in_value718 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_quoted_truncated_in_value731 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_QMARK_in_value744 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_STAR_in_value757 = new BitSet(new long[]{0x0000000000000400L}); + public static final BitSet FOLLOW_COLON_in_value759 = new BitSet(new long[]{0x0000040000000000L}); + public static final BitSet FOLLOW_STAR_in_value763 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_STAR_in_value778 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LBRACK_in_range_term_in809 = new BitSet(new long[]{0x000054000C800800L}); + public static final BitSet FOLLOW_range_value_in_range_term_in821 = new BitSet(new long[]{0x000254400C800800L}); + public static final BitSet FOLLOW_TO_in_range_term_in844 = new BitSet(new long[]{0x000054000C800800L}); + public static final BitSet FOLLOW_range_value_in_range_term_in849 = new BitSet(new long[]{0x0000004000000000L}); + public static final BitSet FOLLOW_RBRACK_in_range_term_in870 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LCURLY_in_range_term_ex890 = new BitSet(new long[]{0x000054000C800800L}); + public static final BitSet FOLLOW_range_value_in_range_term_ex903 = new BitSet(new long[]{0x000254800C800800L}); + public static final BitSet FOLLOW_TO_in_range_term_ex926 = new BitSet(new long[]{0x000054000C800800L}); + public static final BitSet FOLLOW_range_value_in_range_term_ex931 = new BitSet(new long[]{0x0000008000000000L}); + public static final BitSet FOLLOW_RCURLY_in_range_term_ex952 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_truncated_in_range_value966 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_quoted_in_range_value979 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_quoted_truncated_in_range_value992 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet 
FOLLOW_date_in_range_value1005 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_normal_in_range_value1018 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_STAR_in_range_value1032 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LPAREN_in_multi_value1053 = new BitSet(new long[]{0x000054009C9E0000L}); + public static final BitSet FOLLOW_multiClause_in_multi_value1055 = new BitSet(new long[]{0x0000010000000000L}); + public static final BitSet FOLLOW_RPAREN_in_multi_value1057 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_clauseOr_in_multiClause1084 = new BitSet(new long[]{0x000054009C9E0002L}); + public static final BitSet FOLLOW_multiOr_in_multiDefault1128 = new BitSet(new long[]{0x000054009C960002L}); + public static final BitSet FOLLOW_multiAnd_in_multiOr1156 = new BitSet(new long[]{0x0000000002000002L}); + public static final BitSet FOLLOW_or_in_multiOr1166 = new BitSet(new long[]{0x000054009C960000L}); + public static final BitSet FOLLOW_multiAnd_in_multiOr1170 = new BitSet(new long[]{0x0000000002000002L}); + public static final BitSet FOLLOW_multiNot_in_multiAnd1201 = new BitSet(new long[]{0x0000000000000022L}); + public static final BitSet FOLLOW_and_in_multiAnd1211 = new BitSet(new long[]{0x000054009C960000L}); + public static final BitSet FOLLOW_multiNot_in_multiAnd1215 = new BitSet(new long[]{0x0000000000000022L}); + public static final BitSet FOLLOW_multiBasic_in_multiNot1246 = new BitSet(new long[]{0x0000000000400022L}); + public static final BitSet FOLLOW_not_in_multiNot1256 = new BitSet(new long[]{0x000054009C960000L}); + public static final BitSet FOLLOW_multiBasic_in_multiNot1260 = new BitSet(new long[]{0x0000000000400022L}); + public static final BitSet FOLLOW_mterm_in_multiBasic1287 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_modifier_in_mterm1303 = new BitSet(new long[]{0x000054008C860000L}); + public static final BitSet FOLLOW_value_in_mterm1306 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_TERM_TRUNCATED_in_truncated1359 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_PHRASE_ANYTHING_in_quoted_truncated1374 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_PHRASE_in_quoted1386 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_AND_in_operator1402 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_OR_in_operator1412 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_NOT_in_operator1422 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_PLUS_in_modifier1439 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_MINUS_in_modifier1449 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_TILDE_in_term_modifier1467 = new BitSet(new long[]{0x0000000000000102L}); + public static final BitSet FOLLOW_CARAT_in_term_modifier1469 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_CARAT_in_term_modifier1491 = new BitSet(new long[]{0x0000800000000002L}); + public static final BitSet FOLLOW_TILDE_in_term_modifier1493 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_CARAT_in_boost1525 = new BitSet(new long[]{0x0000000000800002L}); + public static final BitSet 
FOLLOW_NUMBER_in_boost1540 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_TILDE_in_fuzzy1563 = new BitSet(new long[]{0x0000000000800002L}); + public static final BitSet FOLLOW_NUMBER_in_fuzzy1578 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_AND_in_not1608 = new BitSet(new long[]{0x0000000000400000L}); + public static final BitSet FOLLOW_NOT_in_not1610 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_NOT_in_not1615 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_AND_in_and1629 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_OR_in_or1643 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_DATE_TOKEN_in_date1660 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_modifier_in_synpred1_StandardLuceneGrammar379 = new BitSet(new long[]{0x0000000000080000L}); + public static final BitSet FOLLOW_LPAREN_in_synpred1_StandardLuceneGrammar381 = new BitSet(new long[]{0x000054009C9E0000L}); + public static final BitSet FOLLOW_clauseOr_in_synpred1_StandardLuceneGrammar383 = new BitSet(new long[]{0x000055009C9E0000L}); + public static final BitSet FOLLOW_RPAREN_in_synpred1_StandardLuceneGrammar386 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LPAREN_in_synpred2_StandardLuceneGrammar440 = new BitSet(new long[]{0x000054009C9E0000L}); + public static final BitSet FOLLOW_clauseOr_in_synpred2_StandardLuceneGrammar442 = new BitSet(new long[]{0x000055009C9E0000L}); + public static final BitSet FOLLOW_RPAREN_in_synpred2_StandardLuceneGrammar445 = new BitSet(new long[]{0x0000800000000100L}); + public static final BitSet FOLLOW_term_modifier_in_synpred2_StandardLuceneGrammar447 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LPAREN_in_synpred3_StandardLuceneGrammar500 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_AND_in_synpred4_StandardLuceneGrammar1602 = new BitSet(new long[]{0x0000000000400000L}); + public static final BitSet FOLLOW_NOT_in_synpred4_StandardLuceneGrammar1604 = new BitSet(new long[]{0x0000000000000002L}); + +} \ No newline at end of file Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammarParser.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammarParser.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammarParser.java (revision 0) @@ -0,0 +1,6557 @@ +// $ANTLR 3.4 /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g 2013-06-27 18:55:44 + + package org.apache.lucene.queryparser.flexible.aqp.parser; + + +import org.antlr.runtime.*; +import java.util.Stack; +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.HashMap; + +import org.antlr.runtime.tree.*; + + +@SuppressWarnings({"all", "warnings", "unchecked"}) +public class ExtendedLuceneGrammarParser extends UnforgivingParser { + public static final String[] tokenNames = new String[] { + "", "", "", "", "AMPER", "AND", "ATOM", "BOOST", "CARAT", "CLAUSE", "COLON", "DATE_TOKEN", "DQUOTE", "ESC_CHAR", "FIELD", "FUZZY", "INT", "LBRACK", "LCURLY", "LPAREN", "MINUS", "MODIFIER", "NEAR", "NOT", "NUMBER", 
"OPERATOR", "OR", "PHRASE", "PHRASE_ANYTHING", "PLUS", "QANYTHING", "QDATE", "QMARK", "QNORMAL", "QPHRASE", "QPHRASETRUNC", "QRANGEEX", "QRANGEIN", "QTRUNCATED", "RBRACK", "RCURLY", "RPAREN", "SQUOTE", "STAR", "TERM_CHAR", "TERM_NORMAL", "TERM_START_CHAR", "TERM_TRUNCATED", "TILDE", "TMODIFIER", "TO", "VBAR", "WS" + }; + + public static final int EOF=-1; + public static final int AMPER=4; + public static final int AND=5; + public static final int ATOM=6; + public static final int BOOST=7; + public static final int CARAT=8; + public static final int CLAUSE=9; + public static final int COLON=10; + public static final int DATE_TOKEN=11; + public static final int DQUOTE=12; + public static final int ESC_CHAR=13; + public static final int FIELD=14; + public static final int FUZZY=15; + public static final int INT=16; + public static final int LBRACK=17; + public static final int LCURLY=18; + public static final int LPAREN=19; + public static final int MINUS=20; + public static final int MODIFIER=21; + public static final int NEAR=22; + public static final int NOT=23; + public static final int NUMBER=24; + public static final int OPERATOR=25; + public static final int OR=26; + public static final int PHRASE=27; + public static final int PHRASE_ANYTHING=28; + public static final int PLUS=29; + public static final int QANYTHING=30; + public static final int QDATE=31; + public static final int QMARK=32; + public static final int QNORMAL=33; + public static final int QPHRASE=34; + public static final int QPHRASETRUNC=35; + public static final int QRANGEEX=36; + public static final int QRANGEIN=37; + public static final int QTRUNCATED=38; + public static final int RBRACK=39; + public static final int RCURLY=40; + public static final int RPAREN=41; + public static final int SQUOTE=42; + public static final int STAR=43; + public static final int TERM_CHAR=44; + public static final int TERM_NORMAL=45; + public static final int TERM_START_CHAR=46; + public static final int TERM_TRUNCATED=47; + public static final int TILDE=48; + public static final int TMODIFIER=49; + public static final int TO=50; + public static final int VBAR=51; + public static final int WS=52; + + // delegates + public UnforgivingParser[] getDelegates() { + return new UnforgivingParser[] {}; + } + + // delegators + + + public ExtendedLuceneGrammarParser(TokenStream input) { + this(input, new RecognizerSharedState()); + } + public ExtendedLuceneGrammarParser(TokenStream input, RecognizerSharedState state) { + super(input, state); + } + +protected TreeAdaptor adaptor = new CommonTreeAdaptor(); + +public void setTreeAdaptor(TreeAdaptor adaptor) { + this.adaptor = adaptor; +} +public TreeAdaptor getTreeAdaptor() { + return adaptor; +} + public String[] getTokenNames() { return ExtendedLuceneGrammarParser.tokenNames; } + public String getGrammarFileName() { return "/dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g"; } + + + public static class mainQ_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "mainQ" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:103:1: mainQ : ( clauseOr )+ EOF -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ; + public final ExtendedLuceneGrammarParser.mainQ_return mainQ() throws RecognitionException { + ExtendedLuceneGrammarParser.mainQ_return retval = new ExtendedLuceneGrammarParser.mainQ_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token EOF2=null; + 
ExtendedLuceneGrammarParser.clauseOr_return clauseOr1 =null; + + + Object EOF2_tree=null; + RewriteRuleTokenStream stream_EOF=new RewriteRuleTokenStream(adaptor,"token EOF"); + RewriteRuleSubtreeStream stream_clauseOr=new RewriteRuleSubtreeStream(adaptor,"rule clauseOr"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:103:7: ( ( clauseOr )+ EOF -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:104:2: ( clauseOr )+ EOF + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:104:2: ( clauseOr )+ + int cnt1=0; + loop1: + do { + int alt1=2; + int LA1_0 = input.LA(1); + + if ( ((LA1_0 >= LBRACK && LA1_0 <= MINUS)||LA1_0==NUMBER||(LA1_0 >= PHRASE && LA1_0 <= PLUS)||LA1_0==QMARK||LA1_0==STAR||LA1_0==TERM_NORMAL||LA1_0==TERM_TRUNCATED) ) { + alt1=1; + } + + + switch (alt1) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:104:2: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_mainQ212); + clauseOr1=clauseOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseOr.add(clauseOr1.getTree()); + + } + break; + + default : + if ( cnt1 >= 1 ) break loop1; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(1, input); + throw eee; + } + cnt1++; + } while (true); + + + EOF2=(Token)match(input,EOF,FOLLOW_EOF_in_mainQ215); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_EOF.add(EOF2); + + + // AST REWRITE + // elements: clauseOr + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 104:16: -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:104:19: ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "DEFOP") + , root_1); + + if ( !(stream_clauseOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseOr.hasNext() ) { + adaptor.addChild(root_1, stream_clauseOr.nextTree()); + + } + stream_clauseOr.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "mainQ" + + + public static class clauseOr_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "clauseOr" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:107:1: clauseOr : (first= clauseAnd -> $first) ( or others= clauseAnd -> ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) )* ; + public final ExtendedLuceneGrammarParser.clauseOr_return clauseOr() throws RecognitionException { 
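+        // Annotation added for readability (not part of the ANTLR output): per the rule comment
+        // above, clauseOr parses one or more clauseAnd operands separated by the `or` rule.
+        // When at least one `or` is present, the rewrite collects the operands under a synthetic
+        // OPERATOR["OR"] node (e.g. `a OR b` becomes ^(OPERATOR["OR"] a b)); otherwise the single
+        // clauseAnd tree is passed through unchanged.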
+ ExtendedLuceneGrammarParser.clauseOr_return retval = new ExtendedLuceneGrammarParser.clauseOr_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.clauseAnd_return first =null; + + ExtendedLuceneGrammarParser.clauseAnd_return others =null; + + ExtendedLuceneGrammarParser.or_return or3 =null; + + + RewriteRuleSubtreeStream stream_clauseAnd=new RewriteRuleSubtreeStream(adaptor,"rule clauseAnd"); + RewriteRuleSubtreeStream stream_or=new RewriteRuleSubtreeStream(adaptor,"rule or"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:108:3: ( (first= clauseAnd -> $first) ( or others= clauseAnd -> ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:108:5: (first= clauseAnd -> $first) ( or others= clauseAnd -> ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:108:5: (first= clauseAnd -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:108:6: first= clauseAnd + { + pushFollow(FOLLOW_clauseAnd_in_clauseOr242); + first=clauseAnd(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseAnd.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 108:22: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:108:33: ( or others= clauseAnd -> ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) )* + loop2: + do { + int alt2=2; + int LA2_0 = input.LA(1); + + if ( (LA2_0==OR) ) { + alt2=1; + } + + + switch (alt2) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:108:34: or others= clauseAnd + { + pushFollow(FOLLOW_or_in_clauseOr251); + or3=or(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_or.add(or3.getTree()); + + pushFollow(FOLLOW_clauseAnd_in_clauseOr255); + others=clauseAnd(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseAnd.add(others.getTree()); + + // AST REWRITE + // elements: clauseAnd + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 108:54: -> ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:108:57: ^( OPERATOR[\"OR\"] ( clauseAnd )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "OR") + , root_1); + + if ( !(stream_clauseAnd.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseAnd.hasNext() ) { + 
adaptor.addChild(root_1, stream_clauseAnd.nextTree()); + + } + stream_clauseAnd.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop2; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "clauseOr" + + + public static class clauseAnd_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "clauseAnd" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:111:1: clauseAnd : (first= clauseNot -> $first) ( and others= clauseNot -> ^( OPERATOR[\"AND\"] ( clauseNot )+ ) )* ; + public final ExtendedLuceneGrammarParser.clauseAnd_return clauseAnd() throws RecognitionException { + ExtendedLuceneGrammarParser.clauseAnd_return retval = new ExtendedLuceneGrammarParser.clauseAnd_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.clauseNot_return first =null; + + ExtendedLuceneGrammarParser.clauseNot_return others =null; + + ExtendedLuceneGrammarParser.and_return and4 =null; + + + RewriteRuleSubtreeStream stream_clauseNot=new RewriteRuleSubtreeStream(adaptor,"rule clauseNot"); + RewriteRuleSubtreeStream stream_and=new RewriteRuleSubtreeStream(adaptor,"rule and"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:112:3: ( (first= clauseNot -> $first) ( and others= clauseNot -> ^( OPERATOR[\"AND\"] ( clauseNot )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:112:5: (first= clauseNot -> $first) ( and others= clauseNot -> ^( OPERATOR[\"AND\"] ( clauseNot )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:112:5: (first= clauseNot -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:112:6: first= clauseNot + { + pushFollow(FOLLOW_clauseNot_in_clauseAnd284); + first=clauseNot(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseNot.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 112:23: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:112:34: ( and others= clauseNot -> ^( OPERATOR[\"AND\"] ( clauseNot )+ ) )* + loop3: + do { + int alt3=2; + int LA3_0 = input.LA(1); + + if ( (LA3_0==AND) ) { + alt3=1; + } + + + switch (alt3) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:112:35: and others= 
clauseNot + { + pushFollow(FOLLOW_and_in_clauseAnd294); + and4=and(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_and.add(and4.getTree()); + + pushFollow(FOLLOW_clauseNot_in_clauseAnd298); + others=clauseNot(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseNot.add(others.getTree()); + + // AST REWRITE + // elements: clauseNot + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 112:56: -> ^( OPERATOR[\"AND\"] ( clauseNot )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:112:59: ^( OPERATOR[\"AND\"] ( clauseNot )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "AND") + , root_1); + + if ( !(stream_clauseNot.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseNot.hasNext() ) { + adaptor.addChild(root_1, stream_clauseNot.nextTree()); + + } + stream_clauseNot.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop3; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "clauseAnd" + + + public static class clauseNot_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "clauseNot" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:115:1: clauseNot : (first= clauseNear -> $first) ( not others= clauseNear -> ^( OPERATOR[\"NOT\"] ( clauseNear )+ ) )* ; + public final ExtendedLuceneGrammarParser.clauseNot_return clauseNot() throws RecognitionException { + ExtendedLuceneGrammarParser.clauseNot_return retval = new ExtendedLuceneGrammarParser.clauseNot_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.clauseNear_return first =null; + + ExtendedLuceneGrammarParser.clauseNear_return others =null; + + ExtendedLuceneGrammarParser.not_return not5 =null; + + + RewriteRuleSubtreeStream stream_not=new RewriteRuleSubtreeStream(adaptor,"rule not"); + RewriteRuleSubtreeStream stream_clauseNear=new RewriteRuleSubtreeStream(adaptor,"rule clauseNear"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:116:3: ( (first= clauseNear -> $first) ( not others= clauseNear -> ^( OPERATOR[\"NOT\"] ( clauseNear )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:116:5: (first= clauseNear -> $first) ( not others= clauseNear -> ^( OPERATOR[\"NOT\"] ( clauseNear )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:116:5: (first= clauseNear -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:116:6: 
first= clauseNear + { + pushFollow(FOLLOW_clauseNear_in_clauseNot329); + first=clauseNear(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseNear.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 116:23: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:116:34: ( not others= clauseNear -> ^( OPERATOR[\"NOT\"] ( clauseNear )+ ) )* + loop4: + do { + int alt4=2; + int LA4_0 = input.LA(1); + + if ( (LA4_0==AND) ) { + int LA4_1 = input.LA(2); + + if ( (LA4_1==NOT) ) { + alt4=1; + } + + + } + else if ( (LA4_0==NOT) ) { + alt4=1; + } + + + switch (alt4) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:116:35: not others= clauseNear + { + pushFollow(FOLLOW_not_in_clauseNot338); + not5=not(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_not.add(not5.getTree()); + + pushFollow(FOLLOW_clauseNear_in_clauseNot342); + others=clauseNear(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseNear.add(others.getTree()); + + // AST REWRITE + // elements: clauseNear + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 116:57: -> ^( OPERATOR[\"NOT\"] ( clauseNear )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:116:60: ^( OPERATOR[\"NOT\"] ( clauseNear )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "NOT") + , root_1); + + if ( !(stream_clauseNear.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseNear.hasNext() ) { + adaptor.addChild(root_1, stream_clauseNear.nextTree()); + + } + stream_clauseNear.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop4; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "clauseNot" + + + public static class clauseNear_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "clauseNear" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:119:1: clauseNear 
: (first= clauseBasic -> $first) ( near others= clauseBasic -> ^( near ( clauseBasic )+ ) )* ; + public final ExtendedLuceneGrammarParser.clauseNear_return clauseNear() throws RecognitionException { + ExtendedLuceneGrammarParser.clauseNear_return retval = new ExtendedLuceneGrammarParser.clauseNear_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.clauseBasic_return first =null; + + ExtendedLuceneGrammarParser.clauseBasic_return others =null; + + ExtendedLuceneGrammarParser.near_return near6 =null; + + + RewriteRuleSubtreeStream stream_clauseBasic=new RewriteRuleSubtreeStream(adaptor,"rule clauseBasic"); + RewriteRuleSubtreeStream stream_near=new RewriteRuleSubtreeStream(adaptor,"rule near"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:120:3: ( (first= clauseBasic -> $first) ( near others= clauseBasic -> ^( near ( clauseBasic )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:120:5: (first= clauseBasic -> $first) ( near others= clauseBasic -> ^( near ( clauseBasic )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:120:5: (first= clauseBasic -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:120:6: first= clauseBasic + { + pushFollow(FOLLOW_clauseBasic_in_clauseNear373); + first=clauseBasic(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseBasic.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 120:24: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:120:35: ( near others= clauseBasic -> ^( near ( clauseBasic )+ ) )* + loop5: + do { + int alt5=2; + int LA5_0 = input.LA(1); + + if ( (LA5_0==NEAR) ) { + alt5=1; + } + + + switch (alt5) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:120:36: near others= clauseBasic + { + pushFollow(FOLLOW_near_in_clauseNear382); + near6=near(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_near.add(near6.getTree()); + + pushFollow(FOLLOW_clauseBasic_in_clauseNear386); + others=clauseBasic(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseBasic.add(others.getTree()); + + // AST REWRITE + // elements: clauseBasic, near + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 120:60: -> ^( near ( clauseBasic )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:120:63: ^( near ( clauseBasic )+ ) + { + Object root_1 = 
(Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot(stream_near.nextNode(), root_1); + + if ( !(stream_clauseBasic.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseBasic.hasNext() ) { + adaptor.addChild(root_1, stream_clauseBasic.nextTree()); + + } + stream_clauseBasic.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop5; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "clauseNear" + + + public static class clauseBasic_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "clauseBasic" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:123:1: clauseBasic : ( ( modifier LPAREN ( clauseOr )+ RPAREN )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) | ( LPAREN ( clauseOr )+ RPAREN term_modifier )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) | ( LPAREN )=> LPAREN ( clauseOr )+ RPAREN -> ( clauseOr )+ | atom ); + public final ExtendedLuceneGrammarParser.clauseBasic_return clauseBasic() throws RecognitionException { + ExtendedLuceneGrammarParser.clauseBasic_return retval = new ExtendedLuceneGrammarParser.clauseBasic_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token LPAREN8=null; + Token RPAREN10=null; + Token LPAREN13=null; + Token RPAREN15=null; + Token LPAREN17=null; + Token RPAREN19=null; + ExtendedLuceneGrammarParser.modifier_return modifier7 =null; + + ExtendedLuceneGrammarParser.clauseOr_return clauseOr9 =null; + + ExtendedLuceneGrammarParser.term_modifier_return term_modifier11 =null; + + ExtendedLuceneGrammarParser.modifier_return modifier12 =null; + + ExtendedLuceneGrammarParser.clauseOr_return clauseOr14 =null; + + ExtendedLuceneGrammarParser.term_modifier_return term_modifier16 =null; + + ExtendedLuceneGrammarParser.clauseOr_return clauseOr18 =null; + + ExtendedLuceneGrammarParser.atom_return atom20 =null; + + + Object LPAREN8_tree=null; + Object RPAREN10_tree=null; + Object LPAREN13_tree=null; + Object RPAREN15_tree=null; + Object LPAREN17_tree=null; + Object RPAREN19_tree=null; + RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN"); + RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN"); + RewriteRuleSubtreeStream stream_modifier=new RewriteRuleSubtreeStream(adaptor,"rule modifier"); + RewriteRuleSubtreeStream stream_term_modifier=new RewriteRuleSubtreeStream(adaptor,"rule term_modifier"); + RewriteRuleSubtreeStream stream_clauseOr=new RewriteRuleSubtreeStream(adaptor,"rule clauseOr"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:124:2: ( ( modifier LPAREN ( clauseOr )+ RPAREN )=> ( modifier )? 
LPAREN ( clauseOr )+ RPAREN ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) | ( LPAREN ( clauseOr )+ RPAREN term_modifier )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) | ( LPAREN )=> LPAREN ( clauseOr )+ RPAREN -> ( clauseOr )+ | atom ) + int alt13=4; + switch ( input.LA(1) ) { + case PLUS: + { + int LA13_1 = input.LA(2); + + if ( (synpred1_ExtendedLuceneGrammar()) ) { + alt13=1; + } + else if ( (synpred2_ExtendedLuceneGrammar()) ) { + alt13=2; + } + else if ( (true) ) { + alt13=4; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 13, 1, input); + + throw nvae; + + } + } + break; + case MINUS: + { + int LA13_2 = input.LA(2); + + if ( (synpred1_ExtendedLuceneGrammar()) ) { + alt13=1; + } + else if ( (synpred2_ExtendedLuceneGrammar()) ) { + alt13=2; + } + else if ( (true) ) { + alt13=4; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 13, 2, input); + + throw nvae; + + } + } + break; + case LPAREN: + { + int LA13_3 = input.LA(2); + + if ( (synpred1_ExtendedLuceneGrammar()) ) { + alt13=1; + } + else if ( (synpred2_ExtendedLuceneGrammar()) ) { + alt13=2; + } + else if ( (synpred3_ExtendedLuceneGrammar()) ) { + alt13=3; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 13, 3, input); + + throw nvae; + + } + } + break; + case LBRACK: + case LCURLY: + case NUMBER: + case PHRASE: + case PHRASE_ANYTHING: + case QMARK: + case STAR: + case TERM_NORMAL: + case TERM_TRUNCATED: + { + alt13=4; + } + break; + default: + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 13, 0, input); + + throw nvae; + + } + + switch (alt13) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:2: ( modifier LPAREN ( clauseOr )+ RPAREN )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:40: ( modifier )? 
+ int alt6=2; + int LA6_0 = input.LA(1); + + if ( (LA6_0==MINUS||LA6_0==PLUS) ) { + alt6=1; + } + switch (alt6) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:40: modifier + { + pushFollow(FOLLOW_modifier_in_clauseBasic427); + modifier7=modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_modifier.add(modifier7.getTree()); + + } + break; + + } + + + LPAREN8=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_clauseBasic430); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN8); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:57: ( clauseOr )+ + int cnt7=0; + loop7: + do { + int alt7=2; + int LA7_0 = input.LA(1); + + if ( ((LA7_0 >= LBRACK && LA7_0 <= MINUS)||LA7_0==NUMBER||(LA7_0 >= PHRASE && LA7_0 <= PLUS)||LA7_0==QMARK||LA7_0==STAR||LA7_0==TERM_NORMAL||LA7_0==TERM_TRUNCATED) ) { + alt7=1; + } + + + switch (alt7) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:57: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_clauseBasic432); + clauseOr9=clauseOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseOr.add(clauseOr9.getTree()); + + } + break; + + default : + if ( cnt7 >= 1 ) break loop7; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(7, input); + throw eee; + } + cnt7++; + } while (true); + + + RPAREN10=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_clauseBasic435); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN10); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:74: ( term_modifier )? + int alt8=2; + int LA8_0 = input.LA(1); + + if ( (LA8_0==CARAT||LA8_0==TILDE) ) { + alt8=1; + } + switch (alt8) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:74: term_modifier + { + pushFollow(FOLLOW_term_modifier_in_clauseBasic437); + term_modifier11=term_modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_term_modifier.add(term_modifier11.getTree()); + + } + break; + + } + + + // AST REWRITE + // elements: clauseOr, modifier, term_modifier + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 126:3: -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:126:6: ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(CLAUSE, "CLAUSE") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:126:15: ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? 
^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) + { + Object root_2 = (Object)adaptor.nil(); + root_2 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(MODIFIER, "MODIFIER") + , root_2); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:126:26: ( modifier )? + if ( stream_modifier.hasNext() ) { + adaptor.addChild(root_2, stream_modifier.nextTree()); + + } + stream_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:126:36: ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) + { + Object root_3 = (Object)adaptor.nil(); + root_3 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(TMODIFIER, "TMODIFIER") + , root_3); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:126:48: ( term_modifier )? + if ( stream_term_modifier.hasNext() ) { + adaptor.addChild(root_3, stream_term_modifier.nextTree()); + + } + stream_term_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:126:63: ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + Object root_4 = (Object)adaptor.nil(); + root_4 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "DEFOP") + , root_4); + + if ( !(stream_clauseOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseOr.hasNext() ) { + adaptor.addChild(root_4, stream_clauseOr.nextTree()); + + } + stream_clauseOr.reset(); + + adaptor.addChild(root_3, root_4); + } + + adaptor.addChild(root_2, root_3); + } + + adaptor.addChild(root_1, root_2); + } + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:4: ( LPAREN ( clauseOr )+ RPAREN term_modifier )=> ( modifier )? LPAREN ( clauseOr )+ RPAREN ( term_modifier )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:46: ( modifier )? 
+ int alt9=2; + int LA9_0 = input.LA(1); + + if ( (LA9_0==MINUS||LA9_0==PLUS) ) { + alt9=1; + } + switch (alt9) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:46: modifier + { + pushFollow(FOLLOW_modifier_in_clauseBasic487); + modifier12=modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_modifier.add(modifier12.getTree()); + + } + break; + + } + + + LPAREN13=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_clauseBasic490); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN13); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:63: ( clauseOr )+ + int cnt10=0; + loop10: + do { + int alt10=2; + int LA10_0 = input.LA(1); + + if ( ((LA10_0 >= LBRACK && LA10_0 <= MINUS)||LA10_0==NUMBER||(LA10_0 >= PHRASE && LA10_0 <= PLUS)||LA10_0==QMARK||LA10_0==STAR||LA10_0==TERM_NORMAL||LA10_0==TERM_TRUNCATED) ) { + alt10=1; + } + + + switch (alt10) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:63: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_clauseBasic492); + clauseOr14=clauseOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseOr.add(clauseOr14.getTree()); + + } + break; + + default : + if ( cnt10 >= 1 ) break loop10; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(10, input); + throw eee; + } + cnt10++; + } while (true); + + + RPAREN15=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_clauseBasic495); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN15); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:80: ( term_modifier )? + int alt11=2; + int LA11_0 = input.LA(1); + + if ( (LA11_0==CARAT||LA11_0==TILDE) ) { + alt11=1; + } + switch (alt11) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:80: term_modifier + { + pushFollow(FOLLOW_term_modifier_in_clauseBasic497); + term_modifier16=term_modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_term_modifier.add(term_modifier16.getTree()); + + } + break; + + } + + + // AST REWRITE + // elements: clauseOr, term_modifier, modifier + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 128:3: -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:128:6: ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(CLAUSE, "CLAUSE") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:128:15: ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? 
^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) ) + { + Object root_2 = (Object)adaptor.nil(); + root_2 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(MODIFIER, "MODIFIER") + , root_2); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:128:26: ( modifier )? + if ( stream_modifier.hasNext() ) { + adaptor.addChild(root_2, stream_modifier.nextTree()); + + } + stream_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:128:36: ^( TMODIFIER ( term_modifier )? ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) + { + Object root_3 = (Object)adaptor.nil(); + root_3 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(TMODIFIER, "TMODIFIER") + , root_3); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:128:48: ( term_modifier )? + if ( stream_term_modifier.hasNext() ) { + adaptor.addChild(root_3, stream_term_modifier.nextTree()); + + } + stream_term_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:128:63: ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + Object root_4 = (Object)adaptor.nil(); + root_4 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "DEFOP") + , root_4); + + if ( !(stream_clauseOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseOr.hasNext() ) { + adaptor.addChild(root_4, stream_clauseOr.nextTree()); + + } + stream_clauseOr.reset(); + + adaptor.addChild(root_3, root_4); + } + + adaptor.addChild(root_2, root_3); + } + + adaptor.addChild(root_1, root_2); + } + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:129:4: ( LPAREN )=> LPAREN ( clauseOr )+ RPAREN + { + LPAREN17=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_clauseBasic542); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN17); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:129:24: ( clauseOr )+ + int cnt12=0; + loop12: + do { + int alt12=2; + int LA12_0 = input.LA(1); + + if ( ((LA12_0 >= LBRACK && LA12_0 <= MINUS)||LA12_0==NUMBER||(LA12_0 >= PHRASE && LA12_0 <= PLUS)||LA12_0==QMARK||LA12_0==STAR||LA12_0==TERM_NORMAL||LA12_0==TERM_TRUNCATED) ) { + alt12=1; + } + + + switch (alt12) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:129:24: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_clauseBasic544); + clauseOr18=clauseOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseOr.add(clauseOr18.getTree()); + + } + break; + + default : + if ( cnt12 >= 1 ) break loop12; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(12, input); + throw eee; + } + cnt12++; + } while (true); + + + RPAREN19=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_clauseBasic547); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN19); + + + // AST REWRITE + // elements: clauseOr + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 130:3: -> ( clauseOr )+ + { + if ( 
!(stream_clauseOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseOr.hasNext() ) { + adaptor.addChild(root_0, stream_clauseOr.nextTree()); + + } + stream_clauseOr.reset(); + + } + + + retval.tree = root_0; + } + + } + break; + case 4 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:131:4: atom + { + root_0 = (Object)adaptor.nil(); + + + pushFollow(FOLLOW_atom_in_clauseBasic559); + atom20=atom(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) adaptor.addChild(root_0, atom20.getTree()); + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "clauseBasic" + + + public static class atom_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "atom" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:135:1: atom : ( ( modifier )? field multi_value ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD field multi_value ) ) ) ) | ( modifier )? ( field )? value ( term_modifier )? -> ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD ( field )? value ) ) ) ); + public final ExtendedLuceneGrammarParser.atom_return atom() throws RecognitionException { + ExtendedLuceneGrammarParser.atom_return retval = new ExtendedLuceneGrammarParser.atom_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.modifier_return modifier21 =null; + + ExtendedLuceneGrammarParser.field_return field22 =null; + + ExtendedLuceneGrammarParser.multi_value_return multi_value23 =null; + + ExtendedLuceneGrammarParser.term_modifier_return term_modifier24 =null; + + ExtendedLuceneGrammarParser.modifier_return modifier25 =null; + + ExtendedLuceneGrammarParser.field_return field26 =null; + + ExtendedLuceneGrammarParser.value_return value27 =null; + + ExtendedLuceneGrammarParser.term_modifier_return term_modifier28 =null; + + + RewriteRuleSubtreeStream stream_modifier=new RewriteRuleSubtreeStream(adaptor,"rule modifier"); + RewriteRuleSubtreeStream stream_field=new RewriteRuleSubtreeStream(adaptor,"rule field"); + RewriteRuleSubtreeStream stream_term_modifier=new RewriteRuleSubtreeStream(adaptor,"rule term_modifier"); + RewriteRuleSubtreeStream stream_value=new RewriteRuleSubtreeStream(adaptor,"rule value"); + RewriteRuleSubtreeStream stream_multi_value=new RewriteRuleSubtreeStream(adaptor,"rule multi_value"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:136:2: ( ( modifier )? field multi_value ( term_modifier )? -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD field multi_value ) ) ) ) | ( modifier )? ( field )? value ( term_modifier )? -> ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD ( field )? 
value ) ) ) ) + int alt19=2; + switch ( input.LA(1) ) { + case PLUS: + { + int LA19_1 = input.LA(2); + + if ( (LA19_1==TERM_NORMAL) ) { + int LA19_3 = input.LA(3); + + if ( (LA19_3==COLON) ) { + int LA19_5 = input.LA(4); + + if ( (LA19_5==LPAREN) ) { + alt19=1; + } + else if ( ((LA19_5 >= LBRACK && LA19_5 <= LCURLY)||LA19_5==NUMBER||(LA19_5 >= PHRASE && LA19_5 <= PHRASE_ANYTHING)||LA19_5==QMARK||LA19_5==STAR||LA19_5==TERM_NORMAL||LA19_5==TERM_TRUNCATED) ) { + alt19=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 5, input); + + throw nvae; + + } + } + else if ( (LA19_3==EOF||LA19_3==AND||LA19_3==CARAT||(LA19_3 >= LBRACK && LA19_3 <= MINUS)||(LA19_3 >= NEAR && LA19_3 <= NUMBER)||(LA19_3 >= OR && LA19_3 <= PLUS)||LA19_3==QMARK||LA19_3==RPAREN||LA19_3==STAR||LA19_3==TERM_NORMAL||(LA19_3 >= TERM_TRUNCATED && LA19_3 <= TILDE)) ) { + alt19=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 3, input); + + throw nvae; + + } + } + else if ( ((LA19_1 >= LBRACK && LA19_1 <= LCURLY)||LA19_1==NUMBER||(LA19_1 >= PHRASE && LA19_1 <= PHRASE_ANYTHING)||LA19_1==QMARK||LA19_1==STAR||LA19_1==TERM_TRUNCATED) ) { + alt19=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 1, input); + + throw nvae; + + } + } + break; + case MINUS: + { + int LA19_2 = input.LA(2); + + if ( (LA19_2==TERM_NORMAL) ) { + int LA19_3 = input.LA(3); + + if ( (LA19_3==COLON) ) { + int LA19_5 = input.LA(4); + + if ( (LA19_5==LPAREN) ) { + alt19=1; + } + else if ( ((LA19_5 >= LBRACK && LA19_5 <= LCURLY)||LA19_5==NUMBER||(LA19_5 >= PHRASE && LA19_5 <= PHRASE_ANYTHING)||LA19_5==QMARK||LA19_5==STAR||LA19_5==TERM_NORMAL||LA19_5==TERM_TRUNCATED) ) { + alt19=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 5, input); + + throw nvae; + + } + } + else if ( (LA19_3==EOF||LA19_3==AND||LA19_3==CARAT||(LA19_3 >= LBRACK && LA19_3 <= MINUS)||(LA19_3 >= NEAR && LA19_3 <= NUMBER)||(LA19_3 >= OR && LA19_3 <= PLUS)||LA19_3==QMARK||LA19_3==RPAREN||LA19_3==STAR||LA19_3==TERM_NORMAL||(LA19_3 >= TERM_TRUNCATED && LA19_3 <= TILDE)) ) { + alt19=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 3, input); + + throw nvae; + + } + } + else if ( ((LA19_2 >= LBRACK && LA19_2 <= LCURLY)||LA19_2==NUMBER||(LA19_2 >= PHRASE && LA19_2 <= PHRASE_ANYTHING)||LA19_2==QMARK||LA19_2==STAR||LA19_2==TERM_TRUNCATED) ) { + alt19=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 2, input); + + throw nvae; + + } + } + break; + case TERM_NORMAL: + { + int LA19_3 = input.LA(2); + + if ( (LA19_3==COLON) ) { + int LA19_5 = input.LA(3); + + if ( (LA19_5==LPAREN) ) { + alt19=1; + } + else if ( ((LA19_5 >= LBRACK && LA19_5 <= LCURLY)||LA19_5==NUMBER||(LA19_5 >= PHRASE && LA19_5 <= PHRASE_ANYTHING)||LA19_5==QMARK||LA19_5==STAR||LA19_5==TERM_NORMAL||LA19_5==TERM_TRUNCATED) ) { + alt19=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 5, input); + + throw nvae; + + } + } + else if ( (LA19_3==EOF||LA19_3==AND||LA19_3==CARAT||(LA19_3 >= LBRACK 
&& LA19_3 <= MINUS)||(LA19_3 >= NEAR && LA19_3 <= NUMBER)||(LA19_3 >= OR && LA19_3 <= PLUS)||LA19_3==QMARK||LA19_3==RPAREN||LA19_3==STAR||LA19_3==TERM_NORMAL||(LA19_3 >= TERM_TRUNCATED && LA19_3 <= TILDE)) ) { + alt19=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 3, input); + + throw nvae; + + } + } + break; + case LBRACK: + case LCURLY: + case NUMBER: + case PHRASE: + case PHRASE_ANYTHING: + case QMARK: + case STAR: + case TERM_TRUNCATED: + { + alt19=2; + } + break; + default: + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 19, 0, input); + + throw nvae; + + } + + switch (alt19) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:137:2: ( modifier )? field multi_value ( term_modifier )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:137:2: ( modifier )? + int alt14=2; + int LA14_0 = input.LA(1); + + if ( (LA14_0==MINUS||LA14_0==PLUS) ) { + alt14=1; + } + switch (alt14) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:137:2: modifier + { + pushFollow(FOLLOW_modifier_in_atom580); + modifier21=modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_modifier.add(modifier21.getTree()); + + } + break; + + } + + + pushFollow(FOLLOW_field_in_atom583); + field22=field(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_field.add(field22.getTree()); + + pushFollow(FOLLOW_multi_value_in_atom585); + multi_value23=multi_value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multi_value.add(multi_value23.getTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:137:30: ( term_modifier )? + int alt15=2; + int LA15_0 = input.LA(1); + + if ( (LA15_0==CARAT||LA15_0==TILDE) ) { + alt15=1; + } + switch (alt15) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:137:30: term_modifier + { + pushFollow(FOLLOW_term_modifier_in_atom587); + term_modifier24=term_modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_term_modifier.add(term_modifier24.getTree()); + + } + break; + + } + + + // AST REWRITE + // elements: modifier, term_modifier, multi_value, field + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 138:3: -> ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD field multi_value ) ) ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:138:6: ^( CLAUSE ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD field multi_value ) ) ) ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(CLAUSE, "CLAUSE") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:138:15: ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? 
^( FIELD field multi_value ) ) ) + { + Object root_2 = (Object)adaptor.nil(); + root_2 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(MODIFIER, "MODIFIER") + , root_2); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:138:26: ( modifier )? + if ( stream_modifier.hasNext() ) { + adaptor.addChild(root_2, stream_modifier.nextTree()); + + } + stream_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:138:36: ^( TMODIFIER ( term_modifier )? ^( FIELD field multi_value ) ) + { + Object root_3 = (Object)adaptor.nil(); + root_3 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(TMODIFIER, "TMODIFIER") + , root_3); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:138:48: ( term_modifier )? + if ( stream_term_modifier.hasNext() ) { + adaptor.addChild(root_3, stream_term_modifier.nextTree()); + + } + stream_term_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:138:63: ^( FIELD field multi_value ) + { + Object root_4 = (Object)adaptor.nil(); + root_4 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FIELD, "FIELD") + , root_4); + + adaptor.addChild(root_4, stream_field.nextTree()); + + adaptor.addChild(root_4, stream_multi_value.nextTree()); + + adaptor.addChild(root_3, root_4); + } + + adaptor.addChild(root_2, root_3); + } + + adaptor.addChild(root_1, root_2); + } + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:139:4: ( modifier )? ( field )? value ( term_modifier )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:139:4: ( modifier )? + int alt16=2; + int LA16_0 = input.LA(1); + + if ( (LA16_0==MINUS||LA16_0==PLUS) ) { + alt16=1; + } + switch (alt16) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:139:4: modifier + { + pushFollow(FOLLOW_modifier_in_atom623); + modifier25=modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_modifier.add(modifier25.getTree()); + + } + break; + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:139:14: ( field )? + int alt17=2; + int LA17_0 = input.LA(1); + + if ( (LA17_0==TERM_NORMAL) ) { + int LA17_1 = input.LA(2); + + if ( (LA17_1==COLON) ) { + alt17=1; + } + } + switch (alt17) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:139:14: field + { + pushFollow(FOLLOW_field_in_atom626); + field26=field(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_field.add(field26.getTree()); + + } + break; + + } + + + pushFollow(FOLLOW_value_in_atom629); + value27=value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_value.add(value27.getTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:139:27: ( term_modifier )? 
+ int alt18=2; + int LA18_0 = input.LA(1); + + if ( (LA18_0==CARAT||LA18_0==TILDE) ) { + alt18=1; + } + switch (alt18) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:139:27: term_modifier + { + pushFollow(FOLLOW_term_modifier_in_atom631); + term_modifier28=term_modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_term_modifier.add(term_modifier28.getTree()); + + } + break; + + } + + + // AST REWRITE + // elements: field, modifier, value, term_modifier + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 140:3: -> ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD ( field )? value ) ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:140:6: ^( MODIFIER ( modifier )? ^( TMODIFIER ( term_modifier )? ^( FIELD ( field )? value ) ) ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(MODIFIER, "MODIFIER") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:140:17: ( modifier )? + if ( stream_modifier.hasNext() ) { + adaptor.addChild(root_1, stream_modifier.nextTree()); + + } + stream_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:140:27: ^( TMODIFIER ( term_modifier )? ^( FIELD ( field )? value ) ) + { + Object root_2 = (Object)adaptor.nil(); + root_2 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(TMODIFIER, "TMODIFIER") + , root_2); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:140:39: ( term_modifier )? + if ( stream_term_modifier.hasNext() ) { + adaptor.addChild(root_2, stream_term_modifier.nextTree()); + + } + stream_term_modifier.reset(); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:140:54: ^( FIELD ( field )? value ) + { + Object root_3 = (Object)adaptor.nil(); + root_3 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FIELD, "FIELD") + , root_3); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:140:62: ( field )? 
+ if ( stream_field.hasNext() ) { + adaptor.addChild(root_3, stream_field.nextTree()); + + } + stream_field.reset(); + + adaptor.addChild(root_3, stream_value.nextTree()); + + adaptor.addChild(root_2, root_3); + } + + adaptor.addChild(root_1, root_2); + } + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "atom" + + + public static class field_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "field" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:144:1: field : TERM_NORMAL COLON -> TERM_NORMAL ; + public final ExtendedLuceneGrammarParser.field_return field() throws RecognitionException { + ExtendedLuceneGrammarParser.field_return retval = new ExtendedLuceneGrammarParser.field_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token TERM_NORMAL29=null; + Token COLON30=null; + + Object TERM_NORMAL29_tree=null; + Object COLON30_tree=null; + RewriteRuleTokenStream stream_COLON=new RewriteRuleTokenStream(adaptor,"token COLON"); + RewriteRuleTokenStream stream_TERM_NORMAL=new RewriteRuleTokenStream(adaptor,"token TERM_NORMAL"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:145:2: ( TERM_NORMAL COLON -> TERM_NORMAL ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:146:2: TERM_NORMAL COLON + { + TERM_NORMAL29=(Token)match(input,TERM_NORMAL,FOLLOW_TERM_NORMAL_in_field678); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TERM_NORMAL.add(TERM_NORMAL29); + + + COLON30=(Token)match(input,COLON,FOLLOW_COLON_in_field680); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_COLON.add(COLON30); + + + // AST REWRITE + // elements: TERM_NORMAL + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 146:20: -> TERM_NORMAL + { + adaptor.addChild(root_0, + stream_TERM_NORMAL.nextNode() + ); + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "field" + + + public static class value_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "value" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:149:1: value : ( range_term_in -> ^( QRANGEIN 
range_term_in ) | range_term_ex -> ^( QRANGEEX range_term_ex ) | normal -> ^( QNORMAL normal ) | truncated -> ^( QTRUNCATED truncated ) | quoted -> ^( QPHRASE quoted ) | quoted_truncated -> ^( QPHRASETRUNC quoted_truncated ) | QMARK -> ^( QTRUNCATED QMARK ) | STAR COLON b= STAR -> ^( QANYTHING $b) | STAR -> ^( QTRUNCATED STAR ) ); + public final ExtendedLuceneGrammarParser.value_return value() throws RecognitionException { + ExtendedLuceneGrammarParser.value_return retval = new ExtendedLuceneGrammarParser.value_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token b=null; + Token QMARK37=null; + Token STAR38=null; + Token COLON39=null; + Token STAR40=null; + ExtendedLuceneGrammarParser.range_term_in_return range_term_in31 =null; + + ExtendedLuceneGrammarParser.range_term_ex_return range_term_ex32 =null; + + ExtendedLuceneGrammarParser.normal_return normal33 =null; + + ExtendedLuceneGrammarParser.truncated_return truncated34 =null; + + ExtendedLuceneGrammarParser.quoted_return quoted35 =null; + + ExtendedLuceneGrammarParser.quoted_truncated_return quoted_truncated36 =null; + + + Object b_tree=null; + Object QMARK37_tree=null; + Object STAR38_tree=null; + Object COLON39_tree=null; + Object STAR40_tree=null; + RewriteRuleTokenStream stream_COLON=new RewriteRuleTokenStream(adaptor,"token COLON"); + RewriteRuleTokenStream stream_STAR=new RewriteRuleTokenStream(adaptor,"token STAR"); + RewriteRuleTokenStream stream_QMARK=new RewriteRuleTokenStream(adaptor,"token QMARK"); + RewriteRuleSubtreeStream stream_range_term_ex=new RewriteRuleSubtreeStream(adaptor,"rule range_term_ex"); + RewriteRuleSubtreeStream stream_normal=new RewriteRuleSubtreeStream(adaptor,"rule normal"); + RewriteRuleSubtreeStream stream_quoted=new RewriteRuleSubtreeStream(adaptor,"rule quoted"); + RewriteRuleSubtreeStream stream_quoted_truncated=new RewriteRuleSubtreeStream(adaptor,"rule quoted_truncated"); + RewriteRuleSubtreeStream stream_truncated=new RewriteRuleSubtreeStream(adaptor,"rule truncated"); + RewriteRuleSubtreeStream stream_range_term_in=new RewriteRuleSubtreeStream(adaptor,"rule range_term_in"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:150:2: ( range_term_in -> ^( QRANGEIN range_term_in ) | range_term_ex -> ^( QRANGEEX range_term_ex ) | normal -> ^( QNORMAL normal ) | truncated -> ^( QTRUNCATED truncated ) | quoted -> ^( QPHRASE quoted ) | quoted_truncated -> ^( QPHRASETRUNC quoted_truncated ) | QMARK -> ^( QTRUNCATED QMARK ) | STAR COLON b= STAR -> ^( QANYTHING $b) | STAR -> ^( QTRUNCATED STAR ) ) + int alt20=9; + switch ( input.LA(1) ) { + case LBRACK: + { + alt20=1; + } + break; + case LCURLY: + { + alt20=2; + } + break; + case NUMBER: + case TERM_NORMAL: + { + alt20=3; + } + break; + case TERM_TRUNCATED: + { + alt20=4; + } + break; + case PHRASE: + { + alt20=5; + } + break; + case PHRASE_ANYTHING: + { + alt20=6; + } + break; + case QMARK: + { + alt20=7; + } + break; + case STAR: + { + int LA20_8 = input.LA(2); + + if ( (LA20_8==COLON) ) { + alt20=8; + } + else if ( (LA20_8==EOF||LA20_8==AND||LA20_8==CARAT||(LA20_8 >= LBRACK && LA20_8 <= MINUS)||(LA20_8 >= NEAR && LA20_8 <= NUMBER)||(LA20_8 >= OR && LA20_8 <= PLUS)||LA20_8==QMARK||LA20_8==RPAREN||LA20_8==STAR||LA20_8==TERM_NORMAL||(LA20_8 >= TERM_TRUNCATED && LA20_8 <= TILDE)) ) { + alt20=9; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 20, 8, input); + + throw nvae; + + } + } + break; + default: + 
if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 20, 0, input); + + throw nvae; + + } + + switch (alt20) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:151:2: range_term_in + { + pushFollow(FOLLOW_range_term_in_in_value699); + range_term_in31=range_term_in(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_term_in.add(range_term_in31.getTree()); + + // AST REWRITE + // elements: range_term_in + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 151:16: -> ^( QRANGEIN range_term_in ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:151:19: ^( QRANGEIN range_term_in ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QRANGEIN, "QRANGEIN") + , root_1); + + adaptor.addChild(root_1, stream_range_term_in.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:152:4: range_term_ex + { + pushFollow(FOLLOW_range_term_ex_in_value712); + range_term_ex32=range_term_ex(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_term_ex.add(range_term_ex32.getTree()); + + // AST REWRITE + // elements: range_term_ex + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 152:18: -> ^( QRANGEEX range_term_ex ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:152:21: ^( QRANGEEX range_term_ex ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QRANGEEX, "QRANGEEX") + , root_1); + + adaptor.addChild(root_1, stream_range_term_ex.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:153:4: normal + { + pushFollow(FOLLOW_normal_in_value726); + normal33=normal(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_normal.add(normal33.getTree()); + + // AST REWRITE + // elements: normal + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 153:11: -> ^( QNORMAL normal ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:153:14: ^( QNORMAL normal ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QNORMAL, "QNORMAL") + , root_1); + + adaptor.addChild(root_1, 
stream_normal.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 4 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:154:4: truncated + { + pushFollow(FOLLOW_truncated_in_value740); + truncated34=truncated(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_truncated.add(truncated34.getTree()); + + // AST REWRITE + // elements: truncated + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 154:14: -> ^( QTRUNCATED truncated ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:154:17: ^( QTRUNCATED truncated ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QTRUNCATED, "QTRUNCATED") + , root_1); + + adaptor.addChild(root_1, stream_truncated.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 5 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:155:4: quoted + { + pushFollow(FOLLOW_quoted_in_value754); + quoted35=quoted(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_quoted.add(quoted35.getTree()); + + // AST REWRITE + // elements: quoted + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 155:11: -> ^( QPHRASE quoted ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:155:14: ^( QPHRASE quoted ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QPHRASE, "QPHRASE") + , root_1); + + adaptor.addChild(root_1, stream_quoted.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 6 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:156:4: quoted_truncated + { + pushFollow(FOLLOW_quoted_truncated_in_value767); + quoted_truncated36=quoted_truncated(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_quoted_truncated.add(quoted_truncated36.getTree()); + + // AST REWRITE + // elements: quoted_truncated + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 156:21: -> ^( QPHRASETRUNC quoted_truncated ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:156:24: ^( QPHRASETRUNC quoted_truncated ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QPHRASETRUNC, "QPHRASETRUNC") + , root_1); + + adaptor.addChild(root_1, stream_quoted_truncated.nextTree()); + + adaptor.addChild(root_0, 
root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 7 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:157:4: QMARK + { + QMARK37=(Token)match(input,QMARK,FOLLOW_QMARK_in_value780); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_QMARK.add(QMARK37); + + + // AST REWRITE + // elements: QMARK + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 157:10: -> ^( QTRUNCATED QMARK ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:157:13: ^( QTRUNCATED QMARK ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QTRUNCATED, "QTRUNCATED") + , root_1); + + adaptor.addChild(root_1, + stream_QMARK.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 8 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:158:4: STAR COLON b= STAR + { + STAR38=(Token)match(input,STAR,FOLLOW_STAR_in_value793); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_STAR.add(STAR38); + + + COLON39=(Token)match(input,COLON,FOLLOW_COLON_in_value795); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_COLON.add(COLON39); + + + b=(Token)match(input,STAR,FOLLOW_STAR_in_value799); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_STAR.add(b); + + + // AST REWRITE + // elements: b + // token labels: b + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleTokenStream stream_b=new RewriteRuleTokenStream(adaptor,"token b",b); + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 158:22: -> ^( QANYTHING $b) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:158:25: ^( QANYTHING $b) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QANYTHING, "QANYTHING") + , root_1); + + adaptor.addChild(root_1, stream_b.nextNode()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 9 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:159:5: STAR + { + STAR40=(Token)match(input,STAR,FOLLOW_STAR_in_value814); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_STAR.add(STAR40); + + + // AST REWRITE + // elements: STAR + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 159:10: -> ^( QTRUNCATED STAR ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:159:13: ^( QTRUNCATED STAR ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QTRUNCATED, "QTRUNCATED") + , root_1); + + 
adaptor.addChild(root_1, + stream_STAR.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "value" + + + public static class range_term_in_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "range_term_in" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:164:1: range_term_in : LBRACK (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? RBRACK ; + public final ExtendedLuceneGrammarParser.range_term_in_return range_term_in() throws RecognitionException { + ExtendedLuceneGrammarParser.range_term_in_return retval = new ExtendedLuceneGrammarParser.range_term_in_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token LBRACK41=null; + Token TO42=null; + Token RBRACK43=null; + ExtendedLuceneGrammarParser.range_value_return a =null; + + ExtendedLuceneGrammarParser.range_value_return b =null; + + + Object LBRACK41_tree=null; + Object TO42_tree=null; + Object RBRACK43_tree=null; + RewriteRuleTokenStream stream_RBRACK=new RewriteRuleTokenStream(adaptor,"token RBRACK"); + RewriteRuleTokenStream stream_LBRACK=new RewriteRuleTokenStream(adaptor,"token LBRACK"); + RewriteRuleTokenStream stream_TO=new RewriteRuleTokenStream(adaptor,"token TO"); + RewriteRuleSubtreeStream stream_range_value=new RewriteRuleSubtreeStream(adaptor,"rule range_value"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:165:2: ( LBRACK (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? RBRACK ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:166:8: LBRACK (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? 
RBRACK + { + LBRACK41=(Token)match(input,LBRACK,FOLLOW_LBRACK_in_range_term_in845); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LBRACK.add(LBRACK41); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:167:8: (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:167:9: a= range_value + { + pushFollow(FOLLOW_range_value_in_range_term_in857); + a=range_value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_value.add(a.getTree()); + + // AST REWRITE + // elements: range_value + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 167:23: -> range_value ^( QANYTHING QANYTHING[\"*\"] ) + { + adaptor.addChild(root_0, stream_range_value.nextTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:167:38: ^( QANYTHING QANYTHING[\"*\"] ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QANYTHING, "QANYTHING") + , root_1); + + adaptor.addChild(root_1, + (Object)adaptor.create(QANYTHING, "*") + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:168:8: ( ( TO )? b= range_value -> $a ( $b)? )? + int alt22=2; + int LA22_0 = input.LA(1); + + if ( (LA22_0==DATE_TOKEN||LA22_0==NUMBER||(LA22_0 >= PHRASE && LA22_0 <= PHRASE_ANYTHING)||LA22_0==STAR||LA22_0==TERM_NORMAL||LA22_0==TERM_TRUNCATED||LA22_0==TO) ) { + alt22=1; + } + switch (alt22) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:168:10: ( TO )? b= range_value + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:168:10: ( TO )? + int alt21=2; + int LA21_0 = input.LA(1); + + if ( (LA21_0==TO) ) { + alt21=1; + } + switch (alt21) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:168:10: TO + { + TO42=(Token)match(input,TO,FOLLOW_TO_in_range_term_in880); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TO.add(TO42); + + + } + break; + + } + + + pushFollow(FOLLOW_range_value_in_range_term_in885); + b=range_value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_value.add(b.getTree()); + + // AST REWRITE + // elements: a, b + // token labels: + // rule labels: retval, b, a + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_b=new RewriteRuleSubtreeStream(adaptor,"rule b",b!=null?b.tree:null); + RewriteRuleSubtreeStream stream_a=new RewriteRuleSubtreeStream(adaptor,"rule a",a!=null?a.tree:null); + + root_0 = (Object)adaptor.nil(); + // 168:28: -> $a ( $b)? + { + adaptor.addChild(root_0, stream_a.nextTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:168:35: ( $b)? 
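+            // The upper bound of the inclusive LBRACK .. RBRACK range is optional:
+            // $b is attached only when a second range_value was matched before RBRACK.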
+ if ( stream_b.hasNext() ) { + adaptor.addChild(root_0, stream_b.nextTree()); + + } + stream_b.reset(); + + } + + + retval.tree = root_0; + } + + } + break; + + } + + + RBRACK43=(Token)match(input,RBRACK,FOLLOW_RBRACK_in_range_term_in906); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RBRACK.add(RBRACK43); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "range_term_in" + + + public static class range_term_ex_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "range_term_ex" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:173:1: range_term_ex : LCURLY (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? RCURLY ; + public final ExtendedLuceneGrammarParser.range_term_ex_return range_term_ex() throws RecognitionException { + ExtendedLuceneGrammarParser.range_term_ex_return retval = new ExtendedLuceneGrammarParser.range_term_ex_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token LCURLY44=null; + Token TO45=null; + Token RCURLY46=null; + ExtendedLuceneGrammarParser.range_value_return a =null; + + ExtendedLuceneGrammarParser.range_value_return b =null; + + + Object LCURLY44_tree=null; + Object TO45_tree=null; + Object RCURLY46_tree=null; + RewriteRuleTokenStream stream_LCURLY=new RewriteRuleTokenStream(adaptor,"token LCURLY"); + RewriteRuleTokenStream stream_TO=new RewriteRuleTokenStream(adaptor,"token TO"); + RewriteRuleTokenStream stream_RCURLY=new RewriteRuleTokenStream(adaptor,"token RCURLY"); + RewriteRuleSubtreeStream stream_range_value=new RewriteRuleSubtreeStream(adaptor,"rule range_value"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:174:2: ( LCURLY (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? RCURLY ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:175:8: LCURLY (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) ( ( TO )? b= range_value -> $a ( $b)? )? 
RCURLY + { + LCURLY44=(Token)match(input,LCURLY,FOLLOW_LCURLY_in_range_term_ex926); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LCURLY.add(LCURLY44); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:176:8: (a= range_value -> range_value ^( QANYTHING QANYTHING[\"*\"] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:176:10: a= range_value + { + pushFollow(FOLLOW_range_value_in_range_term_ex939); + a=range_value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_value.add(a.getTree()); + + // AST REWRITE + // elements: range_value + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 176:24: -> range_value ^( QANYTHING QANYTHING[\"*\"] ) + { + adaptor.addChild(root_0, stream_range_value.nextTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:176:39: ^( QANYTHING QANYTHING[\"*\"] ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QANYTHING, "QANYTHING") + , root_1); + + adaptor.addChild(root_1, + (Object)adaptor.create(QANYTHING, "*") + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:177:8: ( ( TO )? b= range_value -> $a ( $b)? )? + int alt24=2; + int LA24_0 = input.LA(1); + + if ( (LA24_0==DATE_TOKEN||LA24_0==NUMBER||(LA24_0 >= PHRASE && LA24_0 <= PHRASE_ANYTHING)||LA24_0==STAR||LA24_0==TERM_NORMAL||LA24_0==TERM_TRUNCATED||LA24_0==TO) ) { + alt24=1; + } + switch (alt24) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:177:10: ( TO )? b= range_value + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:177:10: ( TO )? + int alt23=2; + int LA23_0 = input.LA(1); + + if ( (LA23_0==TO) ) { + alt23=1; + } + switch (alt23) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:177:10: TO + { + TO45=(Token)match(input,TO,FOLLOW_TO_in_range_term_ex962); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TO.add(TO45); + + + } + break; + + } + + + pushFollow(FOLLOW_range_value_in_range_term_ex967); + b=range_value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_range_value.add(b.getTree()); + + // AST REWRITE + // elements: a, b + // token labels: + // rule labels: retval, b, a + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_b=new RewriteRuleSubtreeStream(adaptor,"rule b",b!=null?b.tree:null); + RewriteRuleSubtreeStream stream_a=new RewriteRuleSubtreeStream(adaptor,"rule a",a!=null?a.tree:null); + + root_0 = (Object)adaptor.nil(); + // 177:28: -> $a ( $b)? + { + adaptor.addChild(root_0, stream_a.nextTree()); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:177:35: ( $b)? 
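+            // Same handling for the exclusive LCURLY .. RCURLY range: the optional
+            // upper bound $b is emitted only when it is present.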
+ if ( stream_b.hasNext() ) { + adaptor.addChild(root_0, stream_b.nextTree()); + + } + stream_b.reset(); + + } + + + retval.tree = root_0; + } + + } + break; + + } + + + RCURLY46=(Token)match(input,RCURLY,FOLLOW_RCURLY_in_range_term_ex988); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RCURLY.add(RCURLY46); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "range_term_ex" + + + public static class range_value_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "range_value" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:181:1: range_value : ( truncated -> ^( QTRUNCATED truncated ) | quoted -> ^( QPHRASE quoted ) | quoted_truncated -> ^( QPHRASETRUNC quoted_truncated ) | date -> ^( QNORMAL date ) | normal -> ^( QNORMAL normal ) | STAR -> ^( QANYTHING STAR ) ); + public final ExtendedLuceneGrammarParser.range_value_return range_value() throws RecognitionException { + ExtendedLuceneGrammarParser.range_value_return retval = new ExtendedLuceneGrammarParser.range_value_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token STAR52=null; + ExtendedLuceneGrammarParser.truncated_return truncated47 =null; + + ExtendedLuceneGrammarParser.quoted_return quoted48 =null; + + ExtendedLuceneGrammarParser.quoted_truncated_return quoted_truncated49 =null; + + ExtendedLuceneGrammarParser.date_return date50 =null; + + ExtendedLuceneGrammarParser.normal_return normal51 =null; + + + Object STAR52_tree=null; + RewriteRuleTokenStream stream_STAR=new RewriteRuleTokenStream(adaptor,"token STAR"); + RewriteRuleSubtreeStream stream_normal=new RewriteRuleSubtreeStream(adaptor,"rule normal"); + RewriteRuleSubtreeStream stream_quoted=new RewriteRuleSubtreeStream(adaptor,"rule quoted"); + RewriteRuleSubtreeStream stream_quoted_truncated=new RewriteRuleSubtreeStream(adaptor,"rule quoted_truncated"); + RewriteRuleSubtreeStream stream_truncated=new RewriteRuleSubtreeStream(adaptor,"rule truncated"); + RewriteRuleSubtreeStream stream_date=new RewriteRuleSubtreeStream(adaptor,"rule date"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:182:2: ( truncated -> ^( QTRUNCATED truncated ) | quoted -> ^( QPHRASE quoted ) | quoted_truncated -> ^( QPHRASETRUNC quoted_truncated ) | date -> ^( QNORMAL date ) | normal -> ^( QNORMAL normal ) | STAR -> ^( QANYTHING STAR ) ) + int alt25=6; + switch ( input.LA(1) ) { + case TERM_TRUNCATED: + { + alt25=1; + } + break; + case PHRASE: + { + alt25=2; + } + break; + case PHRASE_ANYTHING: + { + alt25=3; + } + break; + case DATE_TOKEN: + { + alt25=4; + } + break; + case NUMBER: + case TERM_NORMAL: + { + alt25=5; + } + break; + case STAR: + { + alt25=6; + } + break; + default: + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 25, 0, input); + + throw nvae; + + } + + switch (alt25) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:183:2: truncated + { + 
pushFollow(FOLLOW_truncated_in_range_value1002); + truncated47=truncated(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_truncated.add(truncated47.getTree()); + + // AST REWRITE + // elements: truncated + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 183:12: -> ^( QTRUNCATED truncated ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:183:15: ^( QTRUNCATED truncated ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QTRUNCATED, "QTRUNCATED") + , root_1); + + adaptor.addChild(root_1, stream_truncated.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:184:4: quoted + { + pushFollow(FOLLOW_quoted_in_range_value1015); + quoted48=quoted(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_quoted.add(quoted48.getTree()); + + // AST REWRITE + // elements: quoted + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 184:11: -> ^( QPHRASE quoted ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:184:14: ^( QPHRASE quoted ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QPHRASE, "QPHRASE") + , root_1); + + adaptor.addChild(root_1, stream_quoted.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:185:4: quoted_truncated + { + pushFollow(FOLLOW_quoted_truncated_in_range_value1028); + quoted_truncated49=quoted_truncated(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_quoted_truncated.add(quoted_truncated49.getTree()); + + // AST REWRITE + // elements: quoted_truncated + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 185:21: -> ^( QPHRASETRUNC quoted_truncated ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:185:24: ^( QPHRASETRUNC quoted_truncated ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QPHRASETRUNC, "QPHRASETRUNC") + , root_1); + + adaptor.addChild(root_1, stream_quoted_truncated.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 4 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:186:4: date + { + 
pushFollow(FOLLOW_date_in_range_value1041); + date50=date(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_date.add(date50.getTree()); + + // AST REWRITE + // elements: date + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 186:9: -> ^( QNORMAL date ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:186:12: ^( QNORMAL date ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QNORMAL, "QNORMAL") + , root_1); + + adaptor.addChild(root_1, stream_date.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 5 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:187:4: normal + { + pushFollow(FOLLOW_normal_in_range_value1054); + normal51=normal(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_normal.add(normal51.getTree()); + + // AST REWRITE + // elements: normal + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 187:11: -> ^( QNORMAL normal ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:187:14: ^( QNORMAL normal ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QNORMAL, "QNORMAL") + , root_1); + + adaptor.addChild(root_1, stream_normal.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 6 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:188:4: STAR + { + STAR52=(Token)match(input,STAR,FOLLOW_STAR_in_range_value1068); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_STAR.add(STAR52); + + + // AST REWRITE + // elements: STAR + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 188:9: -> ^( QANYTHING STAR ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:188:12: ^( QANYTHING STAR ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(QANYTHING, "QANYTHING") + , root_1); + + adaptor.addChild(root_1, + stream_STAR.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, 
input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "range_value" + + + public static class multi_value_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multi_value" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:191:1: multi_value : LPAREN multiClause RPAREN -> multiClause ; + public final ExtendedLuceneGrammarParser.multi_value_return multi_value() throws RecognitionException { + ExtendedLuceneGrammarParser.multi_value_return retval = new ExtendedLuceneGrammarParser.multi_value_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token LPAREN53=null; + Token RPAREN55=null; + ExtendedLuceneGrammarParser.multiClause_return multiClause54 =null; + + + Object LPAREN53_tree=null; + Object RPAREN55_tree=null; + RewriteRuleTokenStream stream_RPAREN=new RewriteRuleTokenStream(adaptor,"token RPAREN"); + RewriteRuleTokenStream stream_LPAREN=new RewriteRuleTokenStream(adaptor,"token LPAREN"); + RewriteRuleSubtreeStream stream_multiClause=new RewriteRuleSubtreeStream(adaptor,"rule multiClause"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:192:2: ( LPAREN multiClause RPAREN -> multiClause ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:193:2: LPAREN multiClause RPAREN + { + LPAREN53=(Token)match(input,LPAREN,FOLLOW_LPAREN_in_multi_value1089); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_LPAREN.add(LPAREN53); + + + pushFollow(FOLLOW_multiClause_in_multi_value1091); + multiClause54=multiClause(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiClause.add(multiClause54.getTree()); + + RPAREN55=(Token)match(input,RPAREN,FOLLOW_RPAREN_in_multi_value1093); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_RPAREN.add(RPAREN55); + + + // AST REWRITE + // elements: multiClause + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 193:28: -> multiClause + { + adaptor.addChild(root_0, stream_multiClause.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multi_value" + + + public static class multiClause_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiClause" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:198:1: multiClause : ( clauseOr )+ -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ; + public final ExtendedLuceneGrammarParser.multiClause_return multiClause() throws RecognitionException { + ExtendedLuceneGrammarParser.multiClause_return retval = new ExtendedLuceneGrammarParser.multiClause_return(); + 
retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.clauseOr_return clauseOr56 =null; + + + RewriteRuleSubtreeStream stream_clauseOr=new RewriteRuleSubtreeStream(adaptor,"rule clauseOr"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:199:2: ( ( clauseOr )+ -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:204:2: ( clauseOr )+ + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:204:2: ( clauseOr )+ + int cnt26=0; + loop26: + do { + int alt26=2; + int LA26_0 = input.LA(1); + + if ( ((LA26_0 >= LBRACK && LA26_0 <= MINUS)||LA26_0==NUMBER||(LA26_0 >= PHRASE && LA26_0 <= PLUS)||LA26_0==QMARK||LA26_0==STAR||LA26_0==TERM_NORMAL||LA26_0==TERM_TRUNCATED) ) { + alt26=1; + } + + + switch (alt26) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:204:2: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_multiClause1120); + clauseOr56=clauseOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_clauseOr.add(clauseOr56.getTree()); + + } + break; + + default : + if ( cnt26 >= 1 ) break loop26; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(26, input); + throw eee; + } + cnt26++; + } while (true); + + + // AST REWRITE + // elements: clauseOr + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 204:12: -> ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:204:15: ^( OPERATOR[\"DEFOP\"] ( clauseOr )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "DEFOP") + , root_1); + + if ( !(stream_clauseOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_clauseOr.hasNext() ) { + adaptor.addChild(root_1, stream_clauseOr.nextTree()); + + } + stream_clauseOr.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiClause" + + + public static class multiDefault_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiDefault" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:217:1: multiDefault : ( multiOr )+ -> ^( OPERATOR[\"DEFOP\"] ( multiOr )+ ) ; + public final ExtendedLuceneGrammarParser.multiDefault_return multiDefault() throws RecognitionException { + ExtendedLuceneGrammarParser.multiDefault_return retval = new ExtendedLuceneGrammarParser.multiDefault_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + 
ExtendedLuceneGrammarParser.multiOr_return multiOr57 =null; + + + RewriteRuleSubtreeStream stream_multiOr=new RewriteRuleSubtreeStream(adaptor,"rule multiOr"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:218:2: ( ( multiOr )+ -> ^( OPERATOR[\"DEFOP\"] ( multiOr )+ ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:219:2: ( multiOr )+ + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:219:2: ( multiOr )+ + int cnt27=0; + loop27: + do { + int alt27=2; + int LA27_0 = input.LA(1); + + if ( ((LA27_0 >= LBRACK && LA27_0 <= LCURLY)||LA27_0==MINUS||LA27_0==NUMBER||(LA27_0 >= PHRASE && LA27_0 <= PLUS)||LA27_0==QMARK||LA27_0==STAR||LA27_0==TERM_NORMAL||LA27_0==TERM_TRUNCATED) ) { + alt27=1; + } + + + switch (alt27) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:219:2: multiOr + { + pushFollow(FOLLOW_multiOr_in_multiDefault1164); + multiOr57=multiOr(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiOr.add(multiOr57.getTree()); + + } + break; + + default : + if ( cnt27 >= 1 ) break loop27; + if (state.backtracking>0) {state.failed=true; return retval;} + EarlyExitException eee = + new EarlyExitException(27, input); + throw eee; + } + cnt27++; + } while (true); + + + // AST REWRITE + // elements: multiOr + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 219:11: -> ^( OPERATOR[\"DEFOP\"] ( multiOr )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:219:14: ^( OPERATOR[\"DEFOP\"] ( multiOr )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "DEFOP") + , root_1); + + if ( !(stream_multiOr.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_multiOr.hasNext() ) { + adaptor.addChild(root_1, stream_multiOr.nextTree()); + + } + stream_multiOr.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiDefault" + + + public static class multiOr_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiOr" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:222:1: multiOr : (first= multiAnd -> $first) ( or others= multiAnd -> ^( OPERATOR[\"OR\"] ( multiAnd )+ ) )* ; + public final ExtendedLuceneGrammarParser.multiOr_return multiOr() throws RecognitionException { + ExtendedLuceneGrammarParser.multiOr_return retval = new ExtendedLuceneGrammarParser.multiOr_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.multiAnd_return first =null; + + 
ExtendedLuceneGrammarParser.multiAnd_return others =null; + + ExtendedLuceneGrammarParser.or_return or58 =null; + + + RewriteRuleSubtreeStream stream_multiAnd=new RewriteRuleSubtreeStream(adaptor,"rule multiAnd"); + RewriteRuleSubtreeStream stream_or=new RewriteRuleSubtreeStream(adaptor,"rule or"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:223:2: ( (first= multiAnd -> $first) ( or others= multiAnd -> ^( OPERATOR[\"OR\"] ( multiAnd )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:224:2: (first= multiAnd -> $first) ( or others= multiAnd -> ^( OPERATOR[\"OR\"] ( multiAnd )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:224:2: (first= multiAnd -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:224:3: first= multiAnd + { + pushFollow(FOLLOW_multiAnd_in_multiOr1192); + first=multiAnd(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiAnd.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 224:19: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:224:30: ( or others= multiAnd -> ^( OPERATOR[\"OR\"] ( multiAnd )+ ) )* + loop28: + do { + int alt28=2; + int LA28_0 = input.LA(1); + + if ( (LA28_0==OR) ) { + alt28=1; + } + + + switch (alt28) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:224:31: or others= multiAnd + { + pushFollow(FOLLOW_or_in_multiOr1202); + or58=or(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_or.add(or58.getTree()); + + pushFollow(FOLLOW_multiAnd_in_multiOr1206); + others=multiAnd(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiAnd.add(others.getTree()); + + // AST REWRITE + // elements: multiAnd + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 224:49: -> ^( OPERATOR[\"OR\"] ( multiAnd )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:224:52: ^( OPERATOR[\"OR\"] ( multiAnd )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "OR") + , root_1); + + if ( !(stream_multiAnd.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_multiAnd.hasNext() ) { + adaptor.addChild(root_1, stream_multiAnd.nextTree()); + + } + stream_multiAnd.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop28; + } + } while (true); + + + } + + 
retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiOr" + + + public static class multiAnd_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiAnd" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:227:1: multiAnd : (first= multiNot -> $first) ( and others= multiNot -> ^( OPERATOR[\"AND\"] ( multiNot )+ ) )* ; + public final ExtendedLuceneGrammarParser.multiAnd_return multiAnd() throws RecognitionException { + ExtendedLuceneGrammarParser.multiAnd_return retval = new ExtendedLuceneGrammarParser.multiAnd_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.multiNot_return first =null; + + ExtendedLuceneGrammarParser.multiNot_return others =null; + + ExtendedLuceneGrammarParser.and_return and59 =null; + + + RewriteRuleSubtreeStream stream_multiNot=new RewriteRuleSubtreeStream(adaptor,"rule multiNot"); + RewriteRuleSubtreeStream stream_and=new RewriteRuleSubtreeStream(adaptor,"rule and"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:228:2: ( (first= multiNot -> $first) ( and others= multiNot -> ^( OPERATOR[\"AND\"] ( multiNot )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:229:2: (first= multiNot -> $first) ( and others= multiNot -> ^( OPERATOR[\"AND\"] ( multiNot )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:229:2: (first= multiNot -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:229:3: first= multiNot + { + pushFollow(FOLLOW_multiNot_in_multiAnd1237); + first=multiNot(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiNot.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 229:19: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:229:30: ( and others= multiNot -> ^( OPERATOR[\"AND\"] ( multiNot )+ ) )* + loop29: + do { + int alt29=2; + int LA29_0 = input.LA(1); + + if ( (LA29_0==AND) ) { + alt29=1; + } + + + switch (alt29) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:229:31: and others= multiNot + { + pushFollow(FOLLOW_and_in_multiAnd1247); + and59=and(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_and.add(and59.getTree()); + + pushFollow(FOLLOW_multiNot_in_multiAnd1251); + others=multiNot(); + + 
state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiNot.add(others.getTree()); + + // AST REWRITE + // elements: multiNot + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 229:51: -> ^( OPERATOR[\"AND\"] ( multiNot )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:229:54: ^( OPERATOR[\"AND\"] ( multiNot )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "AND") + , root_1); + + if ( !(stream_multiNot.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_multiNot.hasNext() ) { + adaptor.addChild(root_1, stream_multiNot.nextTree()); + + } + stream_multiNot.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop29; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiAnd" + + + public static class multiNot_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiNot" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:232:1: multiNot : (first= multiNear -> $first) ( not others= multiNear -> ^( OPERATOR[\"NOT\"] ( multiNear )+ ) )* ; + public final ExtendedLuceneGrammarParser.multiNot_return multiNot() throws RecognitionException { + ExtendedLuceneGrammarParser.multiNot_return retval = new ExtendedLuceneGrammarParser.multiNot_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.multiNear_return first =null; + + ExtendedLuceneGrammarParser.multiNear_return others =null; + + ExtendedLuceneGrammarParser.not_return not60 =null; + + + RewriteRuleSubtreeStream stream_not=new RewriteRuleSubtreeStream(adaptor,"rule not"); + RewriteRuleSubtreeStream stream_multiNear=new RewriteRuleSubtreeStream(adaptor,"rule multiNear"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:233:2: ( (first= multiNear -> $first) ( not others= multiNear -> ^( OPERATOR[\"NOT\"] ( multiNear )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:234:2: (first= multiNear -> $first) ( not others= multiNear -> ^( OPERATOR[\"NOT\"] ( multiNear )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:234:2: (first= multiNear -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:234:3: first= multiNear + { + pushFollow(FOLLOW_multiNear_in_multiNot1282); + first=multiNear(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiNear.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: 
retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 234:20: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:234:31: ( not others= multiNear -> ^( OPERATOR[\"NOT\"] ( multiNear )+ ) )* + loop30: + do { + int alt30=2; + int LA30_0 = input.LA(1); + + if ( (LA30_0==AND) ) { + int LA30_1 = input.LA(2); + + if ( (LA30_1==NOT) ) { + alt30=1; + } + + + } + else if ( (LA30_0==NOT) ) { + alt30=1; + } + + + switch (alt30) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:234:32: not others= multiNear + { + pushFollow(FOLLOW_not_in_multiNot1292); + not60=not(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_not.add(not60.getTree()); + + pushFollow(FOLLOW_multiNear_in_multiNot1296); + others=multiNear(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiNear.add(others.getTree()); + + // AST REWRITE + // elements: multiNear + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 234:52: -> ^( OPERATOR[\"NOT\"] ( multiNear )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:234:55: ^( OPERATOR[\"NOT\"] ( multiNear )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, "NOT") + , root_1); + + if ( !(stream_multiNear.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_multiNear.hasNext() ) { + adaptor.addChild(root_1, stream_multiNear.nextTree()); + + } + stream_multiNear.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop30; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiNot" + + + public static class multiNear_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiNear" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:237:1: multiNear : (first= multiBasic -> $first) ( near others= multiBasic -> ^( near ( multiBasic )+ ) )* ; + public final ExtendedLuceneGrammarParser.multiNear_return multiNear() throws RecognitionException { + ExtendedLuceneGrammarParser.multiNear_return retval = new 
ExtendedLuceneGrammarParser.multiNear_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.multiBasic_return first =null; + + ExtendedLuceneGrammarParser.multiBasic_return others =null; + + ExtendedLuceneGrammarParser.near_return near61 =null; + + + RewriteRuleSubtreeStream stream_near=new RewriteRuleSubtreeStream(adaptor,"rule near"); + RewriteRuleSubtreeStream stream_multiBasic=new RewriteRuleSubtreeStream(adaptor,"rule multiBasic"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:238:2: ( (first= multiBasic -> $first) ( near others= multiBasic -> ^( near ( multiBasic )+ ) )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:239:2: (first= multiBasic -> $first) ( near others= multiBasic -> ^( near ( multiBasic )+ ) )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:239:2: (first= multiBasic -> $first) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:239:3: first= multiBasic + { + pushFollow(FOLLOW_multiBasic_in_multiNear1326); + first=multiBasic(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiBasic.add(first.getTree()); + + // AST REWRITE + // elements: first + // token labels: + // rule labels: retval, first + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + RewriteRuleSubtreeStream stream_first=new RewriteRuleSubtreeStream(adaptor,"rule first",first!=null?first.tree:null); + + root_0 = (Object)adaptor.nil(); + // 239:21: -> $first + { + adaptor.addChild(root_0, stream_first.nextTree()); + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:239:32: ( near others= multiBasic -> ^( near ( multiBasic )+ ) )* + loop31: + do { + int alt31=2; + int LA31_0 = input.LA(1); + + if ( (LA31_0==NEAR) ) { + alt31=1; + } + + + switch (alt31) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:239:33: near others= multiBasic + { + pushFollow(FOLLOW_near_in_multiNear1336); + near61=near(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_near.add(near61.getTree()); + + pushFollow(FOLLOW_multiBasic_in_multiNear1340); + others=multiBasic(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_multiBasic.add(others.getTree()); + + // AST REWRITE + // elements: multiBasic, near + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 239:55: -> ^( near ( multiBasic )+ ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:239:58: ^( near ( multiBasic )+ ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot(stream_near.nextNode(), root_1); + + if ( !(stream_multiBasic.hasNext()) ) { + throw new RewriteEarlyExitException(); + } + while ( stream_multiBasic.hasNext() ) { + adaptor.addChild(root_1, stream_multiBasic.nextTree()); + + } + 
stream_multiBasic.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + default : + break loop31; + } + } while (true); + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiNear" + + + public static class multiBasic_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "multiBasic" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:243:1: multiBasic : mterm ; + public final ExtendedLuceneGrammarParser.multiBasic_return multiBasic() throws RecognitionException { + ExtendedLuceneGrammarParser.multiBasic_return retval = new ExtendedLuceneGrammarParser.multiBasic_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.mterm_return mterm62 =null; + + + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:244:2: ( mterm ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:245:2: mterm + { + root_0 = (Object)adaptor.nil(); + + + pushFollow(FOLLOW_mterm_in_multiBasic1366); + mterm62=mterm(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) adaptor.addChild(root_0, mterm62.getTree()); + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "multiBasic" + + + public static class mterm_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "mterm" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:248:1: mterm : ( modifier )? value -> ^( MODIFIER ( modifier )? value ) ; + public final ExtendedLuceneGrammarParser.mterm_return mterm() throws RecognitionException { + ExtendedLuceneGrammarParser.mterm_return retval = new ExtendedLuceneGrammarParser.mterm_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + ExtendedLuceneGrammarParser.modifier_return modifier63 =null; + + ExtendedLuceneGrammarParser.value_return value64 =null; + + + RewriteRuleSubtreeStream stream_modifier=new RewriteRuleSubtreeStream(adaptor,"rule modifier"); + RewriteRuleSubtreeStream stream_value=new RewriteRuleSubtreeStream(adaptor,"rule value"); + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:249:2: ( ( modifier )? value -> ^( MODIFIER ( modifier )? value ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:250:2: ( modifier )? value + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:250:2: ( modifier )? 
+ int alt32=2; + int LA32_0 = input.LA(1); + + if ( (LA32_0==MINUS||LA32_0==PLUS) ) { + alt32=1; + } + switch (alt32) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:250:2: modifier + { + pushFollow(FOLLOW_modifier_in_mterm1382); + modifier63=modifier(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_modifier.add(modifier63.getTree()); + + } + break; + + } + + + pushFollow(FOLLOW_value_in_mterm1385); + value64=value(); + + state._fsp--; + if (state.failed) return retval; + if ( state.backtracking==0 ) stream_value.add(value64.getTree()); + + // AST REWRITE + // elements: modifier, value + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 250:18: -> ^( MODIFIER ( modifier )? value ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:250:21: ^( MODIFIER ( modifier )? value ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(MODIFIER, "MODIFIER") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:250:32: ( modifier )? + if ( stream_modifier.hasNext() ) { + adaptor.addChild(root_1, stream_modifier.nextTree()); + + } + stream_modifier.reset(); + + adaptor.addChild(root_1, stream_value.nextTree()); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "mterm" + + + public static class normal_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "normal" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:254:1: normal : ( TERM_NORMAL | NUMBER ); + public final ExtendedLuceneGrammarParser.normal_return normal() throws RecognitionException { + ExtendedLuceneGrammarParser.normal_return retval = new ExtendedLuceneGrammarParser.normal_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token set65=null; + + Object set65_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:255:2: ( TERM_NORMAL | NUMBER ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + root_0 = (Object)adaptor.nil(); + + + set65=(Token)input.LT(1); + + if ( input.LA(1)==NUMBER||input.LA(1)==TERM_NORMAL ) { + input.consume(); + if ( state.backtracking==0 ) adaptor.addChild(root_0, + (Object)adaptor.create(set65) + ); + state.errorRecovery=false; + state.failed=false; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + MismatchedSetException mse = new MismatchedSetException(null,input); + throw mse; + } + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = 
(Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "normal" + + + public static class truncated_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "truncated" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:263:1: truncated : TERM_TRUNCATED ; + public final ExtendedLuceneGrammarParser.truncated_return truncated() throws RecognitionException { + ExtendedLuceneGrammarParser.truncated_return retval = new ExtendedLuceneGrammarParser.truncated_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token TERM_TRUNCATED66=null; + + Object TERM_TRUNCATED66_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:264:2: ( TERM_TRUNCATED ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:265:2: TERM_TRUNCATED + { + root_0 = (Object)adaptor.nil(); + + + TERM_TRUNCATED66=(Token)match(input,TERM_TRUNCATED,FOLLOW_TERM_TRUNCATED_in_truncated1438); if (state.failed) return retval; + if ( state.backtracking==0 ) { + TERM_TRUNCATED66_tree = + (Object)adaptor.create(TERM_TRUNCATED66) + ; + adaptor.addChild(root_0, TERM_TRUNCATED66_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "truncated" + + + public static class quoted_truncated_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "quoted_truncated" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:269:1: quoted_truncated : PHRASE_ANYTHING ; + public final ExtendedLuceneGrammarParser.quoted_truncated_return quoted_truncated() throws RecognitionException { + ExtendedLuceneGrammarParser.quoted_truncated_return retval = new ExtendedLuceneGrammarParser.quoted_truncated_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token PHRASE_ANYTHING67=null; + + Object PHRASE_ANYTHING67_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:270:2: ( PHRASE_ANYTHING ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:271:2: PHRASE_ANYTHING + { + root_0 = (Object)adaptor.nil(); + + + PHRASE_ANYTHING67=(Token)match(input,PHRASE_ANYTHING,FOLLOW_PHRASE_ANYTHING_in_quoted_truncated1453); if (state.failed) return retval; + if ( state.backtracking==0 ) { + PHRASE_ANYTHING67_tree = + (Object)adaptor.create(PHRASE_ANYTHING67) + ; + adaptor.addChild(root_0, PHRASE_ANYTHING67_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + 
reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "quoted_truncated" + + + public static class quoted_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "quoted" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:274:1: quoted : PHRASE ; + public final ExtendedLuceneGrammarParser.quoted_return quoted() throws RecognitionException { + ExtendedLuceneGrammarParser.quoted_return retval = new ExtendedLuceneGrammarParser.quoted_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token PHRASE68=null; + + Object PHRASE68_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:274:8: ( PHRASE ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:275:2: PHRASE + { + root_0 = (Object)adaptor.nil(); + + + PHRASE68=(Token)match(input,PHRASE,FOLLOW_PHRASE_in_quoted1465); if (state.failed) return retval; + if ( state.backtracking==0 ) { + PHRASE68_tree = + (Object)adaptor.create(PHRASE68) + ; + adaptor.addChild(root_0, PHRASE68_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "quoted" + + + public static class operator_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "operator" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:281:1: operator : ( AND -> OPERATOR[\"AND\"] | OR -> OPERATOR[\"OR\"] | NOT -> OPERATOR[\"NOT\"] | NEAR -> OPERATOR[\"NEAR\"] ) ; + public final ExtendedLuceneGrammarParser.operator_return operator() throws RecognitionException { + ExtendedLuceneGrammarParser.operator_return retval = new ExtendedLuceneGrammarParser.operator_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token AND69=null; + Token OR70=null; + Token NOT71=null; + Token NEAR72=null; + + Object AND69_tree=null; + Object OR70_tree=null; + Object NOT71_tree=null; + Object NEAR72_tree=null; + RewriteRuleTokenStream stream_NEAR=new RewriteRuleTokenStream(adaptor,"token NEAR"); + RewriteRuleTokenStream stream_NOT=new RewriteRuleTokenStream(adaptor,"token NOT"); + RewriteRuleTokenStream stream_AND=new RewriteRuleTokenStream(adaptor,"token AND"); + RewriteRuleTokenStream stream_OR=new RewriteRuleTokenStream(adaptor,"token OR"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:281:9: ( ( AND -> OPERATOR[\"AND\"] | OR -> OPERATOR[\"OR\"] | NOT -> OPERATOR[\"NOT\"] | NEAR -> OPERATOR[\"NEAR\"] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:281:11: ( AND -> OPERATOR[\"AND\"] | OR -> OPERATOR[\"OR\"] | NOT -> OPERATOR[\"NOT\"] | NEAR -> OPERATOR[\"NEAR\"] ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:281:11: ( AND -> OPERATOR[\"AND\"] | OR -> OPERATOR[\"OR\"] | NOT -> OPERATOR[\"NOT\"] | NEAR -> 
OPERATOR[\"NEAR\"] ) + int alt33=4; + switch ( input.LA(1) ) { + case AND: + { + alt33=1; + } + break; + case OR: + { + alt33=2; + } + break; + case NOT: + { + alt33=3; + } + break; + case NEAR: + { + alt33=4; + } + break; + default: + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 33, 0, input); + + throw nvae; + + } + + switch (alt33) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:282:2: AND + { + AND69=(Token)match(input,AND,FOLLOW_AND_in_operator1481); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_AND.add(AND69); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 282:6: -> OPERATOR[\"AND\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(OPERATOR, "AND") + ); + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:283:4: OR + { + OR70=(Token)match(input,OR,FOLLOW_OR_in_operator1491); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_OR.add(OR70); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 283:7: -> OPERATOR[\"OR\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(OPERATOR, "OR") + ); + + } + + + retval.tree = root_0; + } + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:284:4: NOT + { + NOT71=(Token)match(input,NOT,FOLLOW_NOT_in_operator1501); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_NOT.add(NOT71); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 284:8: -> OPERATOR[\"NOT\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(OPERATOR, "NOT") + ); + + } + + + retval.tree = root_0; + } + + } + break; + case 4 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:285:4: NEAR + { + NEAR72=(Token)match(input,NEAR,FOLLOW_NEAR_in_operator1511); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_NEAR.add(NEAR72); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 285:9: -> OPERATOR[\"NEAR\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(OPERATOR, "NEAR") + ); + + } + + + retval.tree = root_0; + } + + 
} + break; + + } + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "operator" + + + public static class modifier_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "modifier" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:288:1: modifier : ( PLUS -> PLUS[\"+\"] | MINUS -> MINUS[\"-\"] ); + public final ExtendedLuceneGrammarParser.modifier_return modifier() throws RecognitionException { + ExtendedLuceneGrammarParser.modifier_return retval = new ExtendedLuceneGrammarParser.modifier_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token PLUS73=null; + Token MINUS74=null; + + Object PLUS73_tree=null; + Object MINUS74_tree=null; + RewriteRuleTokenStream stream_PLUS=new RewriteRuleTokenStream(adaptor,"token PLUS"); + RewriteRuleTokenStream stream_MINUS=new RewriteRuleTokenStream(adaptor,"token MINUS"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:288:9: ( PLUS -> PLUS[\"+\"] | MINUS -> MINUS[\"-\"] ) + int alt34=2; + int LA34_0 = input.LA(1); + + if ( (LA34_0==PLUS) ) { + alt34=1; + } + else if ( (LA34_0==MINUS) ) { + alt34=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 34, 0, input); + + throw nvae; + + } + switch (alt34) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:289:2: PLUS + { + PLUS73=(Token)match(input,PLUS,FOLLOW_PLUS_in_modifier1528); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_PLUS.add(PLUS73); + + + // AST REWRITE + // elements: PLUS + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 289:7: -> PLUS[\"+\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(PLUS, "+") + ); + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:290:4: MINUS + { + MINUS74=(Token)match(input,MINUS,FOLLOW_MINUS_in_modifier1538); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_MINUS.add(MINUS74); + + + // AST REWRITE + // elements: MINUS + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 290:10: -> MINUS[\"-\"] + { + adaptor.addChild(root_0, + (Object)adaptor.create(MINUS, "-") + ); + + } + + + retval.tree = root_0; + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + 
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "modifier" + + + public static class term_modifier_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "term_modifier" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:302:1: term_modifier : ( TILDE ( CARAT )? -> ^( BOOST ( CARAT )? ) ^( FUZZY TILDE ) | CARAT ( TILDE )? -> ^( BOOST CARAT ) ^( FUZZY ( TILDE )? ) ); + public final ExtendedLuceneGrammarParser.term_modifier_return term_modifier() throws RecognitionException { + ExtendedLuceneGrammarParser.term_modifier_return retval = new ExtendedLuceneGrammarParser.term_modifier_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token TILDE75=null; + Token CARAT76=null; + Token CARAT77=null; + Token TILDE78=null; + + Object TILDE75_tree=null; + Object CARAT76_tree=null; + Object CARAT77_tree=null; + Object TILDE78_tree=null; + RewriteRuleTokenStream stream_CARAT=new RewriteRuleTokenStream(adaptor,"token CARAT"); + RewriteRuleTokenStream stream_TILDE=new RewriteRuleTokenStream(adaptor,"token TILDE"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:302:15: ( TILDE ( CARAT )? -> ^( BOOST ( CARAT )? ) ^( FUZZY TILDE ) | CARAT ( TILDE )? -> ^( BOOST CARAT ) ^( FUZZY ( TILDE )? ) ) + int alt37=2; + int LA37_0 = input.LA(1); + + if ( (LA37_0==TILDE) ) { + alt37=1; + } + else if ( (LA37_0==CARAT) ) { + alt37=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 37, 0, input); + + throw nvae; + + } + switch (alt37) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:303:2: TILDE ( CARAT )? + { + TILDE75=(Token)match(input,TILDE,FOLLOW_TILDE_in_term_modifier1556); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TILDE.add(TILDE75); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:303:8: ( CARAT )? + int alt35=2; + int LA35_0 = input.LA(1); + + if ( (LA35_0==CARAT) ) { + alt35=1; + } + switch (alt35) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:303:8: CARAT + { + CARAT76=(Token)match(input,CARAT,FOLLOW_CARAT_in_term_modifier1558); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_CARAT.add(CARAT76); + + + } + break; + + } + + + // AST REWRITE + // elements: TILDE, CARAT + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 303:15: -> ^( BOOST ( CARAT )? ) ^( FUZZY TILDE ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:303:18: ^( BOOST ( CARAT )? ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(BOOST, "BOOST") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:303:26: ( CARAT )? 
+ if ( stream_CARAT.hasNext() ) { + adaptor.addChild(root_1, + stream_CARAT.nextNode() + ); + + } + stream_CARAT.reset(); + + adaptor.addChild(root_0, root_1); + } + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:303:34: ^( FUZZY TILDE ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FUZZY, "FUZZY") + , root_1); + + adaptor.addChild(root_1, + stream_TILDE.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:304:4: CARAT ( TILDE )? + { + CARAT77=(Token)match(input,CARAT,FOLLOW_CARAT_in_term_modifier1580); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_CARAT.add(CARAT77); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:304:10: ( TILDE )? + int alt36=2; + int LA36_0 = input.LA(1); + + if ( (LA36_0==TILDE) ) { + alt36=1; + } + switch (alt36) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:304:10: TILDE + { + TILDE78=(Token)match(input,TILDE,FOLLOW_TILDE_in_term_modifier1582); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TILDE.add(TILDE78); + + + } + break; + + } + + + // AST REWRITE + // elements: TILDE, CARAT + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 304:17: -> ^( BOOST CARAT ) ^( FUZZY ( TILDE )? ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:304:20: ^( BOOST CARAT ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(BOOST, "BOOST") + , root_1); + + adaptor.addChild(root_1, + stream_CARAT.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:304:35: ^( FUZZY ( TILDE )? ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FUZZY, "FUZZY") + , root_1); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:304:43: ( TILDE )? + if ( stream_TILDE.hasNext() ) { + adaptor.addChild(root_1, + stream_TILDE.nextNode() + ); + + } + stream_TILDE.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "term_modifier" + + + public static class boost_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "boost" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:324:1: boost : ( CARAT -> ^( BOOST NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( BOOST NUMBER ) )? 
; + public final ExtendedLuceneGrammarParser.boost_return boost() throws RecognitionException { + ExtendedLuceneGrammarParser.boost_return retval = new ExtendedLuceneGrammarParser.boost_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token CARAT79=null; + Token NUMBER80=null; + + Object CARAT79_tree=null; + Object NUMBER80_tree=null; + RewriteRuleTokenStream stream_CARAT=new RewriteRuleTokenStream(adaptor,"token CARAT"); + RewriteRuleTokenStream stream_NUMBER=new RewriteRuleTokenStream(adaptor,"token NUMBER"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:324:7: ( ( CARAT -> ^( BOOST NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( BOOST NUMBER ) )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:325:2: ( CARAT -> ^( BOOST NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( BOOST NUMBER ) )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:325:2: ( CARAT -> ^( BOOST NUMBER[\"DEF\"] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:325:3: CARAT + { + CARAT79=(Token)match(input,CARAT,FOLLOW_CARAT_in_boost1614); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_CARAT.add(CARAT79); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 325:9: -> ^( BOOST NUMBER[\"DEF\"] ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:325:12: ^( BOOST NUMBER[\"DEF\"] ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(BOOST, "BOOST") + , root_1); + + adaptor.addChild(root_1, + (Object)adaptor.create(NUMBER, "DEF") + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:326:2: ( NUMBER -> ^( BOOST NUMBER ) )? 
+ int alt38=2; + int LA38_0 = input.LA(1); + + if ( (LA38_0==NUMBER) ) { + alt38=1; + } + switch (alt38) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:326:3: NUMBER + { + NUMBER80=(Token)match(input,NUMBER,FOLLOW_NUMBER_in_boost1629); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_NUMBER.add(NUMBER80); + + + // AST REWRITE + // elements: NUMBER + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 326:10: -> ^( BOOST NUMBER ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:326:13: ^( BOOST NUMBER ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(BOOST, "BOOST") + , root_1); + + adaptor.addChild(root_1, + stream_NUMBER.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "boost" + + + public static class fuzzy_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "fuzzy" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:329:1: fuzzy : ( TILDE -> ^( FUZZY NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( FUZZY NUMBER ) )? ; + public final ExtendedLuceneGrammarParser.fuzzy_return fuzzy() throws RecognitionException { + ExtendedLuceneGrammarParser.fuzzy_return retval = new ExtendedLuceneGrammarParser.fuzzy_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token TILDE81=null; + Token NUMBER82=null; + + Object TILDE81_tree=null; + Object NUMBER82_tree=null; + RewriteRuleTokenStream stream_TILDE=new RewriteRuleTokenStream(adaptor,"token TILDE"); + RewriteRuleTokenStream stream_NUMBER=new RewriteRuleTokenStream(adaptor,"token NUMBER"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:329:7: ( ( TILDE -> ^( FUZZY NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( FUZZY NUMBER ) )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:330:2: ( TILDE -> ^( FUZZY NUMBER[\"DEF\"] ) ) ( NUMBER -> ^( FUZZY NUMBER ) )? 
+ { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:330:2: ( TILDE -> ^( FUZZY NUMBER[\"DEF\"] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:330:3: TILDE + { + TILDE81=(Token)match(input,TILDE,FOLLOW_TILDE_in_fuzzy1652); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_TILDE.add(TILDE81); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 330:9: -> ^( FUZZY NUMBER[\"DEF\"] ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:330:12: ^( FUZZY NUMBER[\"DEF\"] ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FUZZY, "FUZZY") + , root_1); + + adaptor.addChild(root_1, + (Object)adaptor.create(NUMBER, "DEF") + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:331:2: ( NUMBER -> ^( FUZZY NUMBER ) )? + int alt39=2; + int LA39_0 = input.LA(1); + + if ( (LA39_0==NUMBER) ) { + alt39=1; + } + switch (alt39) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:331:3: NUMBER + { + NUMBER82=(Token)match(input,NUMBER,FOLLOW_NUMBER_in_fuzzy1667); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_NUMBER.add(NUMBER82); + + + // AST REWRITE + // elements: NUMBER + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 331:10: -> ^( FUZZY NUMBER ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:331:13: ^( FUZZY NUMBER ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(FUZZY, "FUZZY") + , root_1); + + adaptor.addChild(root_1, + stream_NUMBER.nextNode() + ); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + break; + + } + + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "fuzzy" + + + public static class not_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "not" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:334:1: not : ( ( AND NOT )=> AND NOT | NOT ); + public final ExtendedLuceneGrammarParser.not_return not() throws RecognitionException { + ExtendedLuceneGrammarParser.not_return retval = new ExtendedLuceneGrammarParser.not_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + 
Token AND83=null; + Token NOT84=null; + Token NOT85=null; + + Object AND83_tree=null; + Object NOT84_tree=null; + Object NOT85_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:334:5: ( ( AND NOT )=> AND NOT | NOT ) + int alt40=2; + int LA40_0 = input.LA(1); + + if ( (LA40_0==AND) && (synpred4_ExtendedLuceneGrammar())) { + alt40=1; + } + else if ( (LA40_0==NOT) ) { + alt40=2; + } + else { + if (state.backtracking>0) {state.failed=true; return retval;} + NoViableAltException nvae = + new NoViableAltException("", 40, 0, input); + + throw nvae; + + } + switch (alt40) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:335:2: ( AND NOT )=> AND NOT + { + root_0 = (Object)adaptor.nil(); + + + AND83=(Token)match(input,AND,FOLLOW_AND_in_not1697); if (state.failed) return retval; + if ( state.backtracking==0 ) { + AND83_tree = + (Object)adaptor.create(AND83) + ; + adaptor.addChild(root_0, AND83_tree); + } + + NOT84=(Token)match(input,NOT,FOLLOW_NOT_in_not1699); if (state.failed) return retval; + if ( state.backtracking==0 ) { + NOT84_tree = + (Object)adaptor.create(NOT84) + ; + adaptor.addChild(root_0, NOT84_tree); + } + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:336:4: NOT + { + root_0 = (Object)adaptor.nil(); + + + NOT85=(Token)match(input,NOT,FOLLOW_NOT_in_not1704); if (state.failed) return retval; + if ( state.backtracking==0 ) { + NOT85_tree = + (Object)adaptor.create(NOT85) + ; + adaptor.addChild(root_0, NOT85_tree); + } + + } + break; + + } + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "not" + + + public static class and_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "and" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:339:1: and : AND ; + public final ExtendedLuceneGrammarParser.and_return and() throws RecognitionException { + ExtendedLuceneGrammarParser.and_return retval = new ExtendedLuceneGrammarParser.and_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token AND86=null; + + Object AND86_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:339:6: ( AND ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:340:2: AND + { + root_0 = (Object)adaptor.nil(); + + + AND86=(Token)match(input,AND,FOLLOW_AND_in_and1718); if (state.failed) return retval; + if ( state.backtracking==0 ) { + AND86_tree = + (Object)adaptor.create(AND86) + ; + adaptor.addChild(root_0, AND86_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // 
$ANTLR end "and" + + + public static class or_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "or" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:343:1: or : OR ; + public final ExtendedLuceneGrammarParser.or_return or() throws RecognitionException { + ExtendedLuceneGrammarParser.or_return retval = new ExtendedLuceneGrammarParser.or_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token OR87=null; + + Object OR87_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:343:5: ( OR ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:344:2: OR + { + root_0 = (Object)adaptor.nil(); + + + OR87=(Token)match(input,OR,FOLLOW_OR_in_or1732); if (state.failed) return retval; + if ( state.backtracking==0 ) { + OR87_tree = + (Object)adaptor.create(OR87) + ; + adaptor.addChild(root_0, OR87_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "or" + + + public static class near_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "near" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:347:1: near : ( NEAR -> ^( OPERATOR[$NEAR] ) ) ; + public final ExtendedLuceneGrammarParser.near_return near() throws RecognitionException { + ExtendedLuceneGrammarParser.near_return retval = new ExtendedLuceneGrammarParser.near_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token NEAR88=null; + + Object NEAR88_tree=null; + RewriteRuleTokenStream stream_NEAR=new RewriteRuleTokenStream(adaptor,"token NEAR"); + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:347:7: ( ( NEAR -> ^( OPERATOR[$NEAR] ) ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:348:3: ( NEAR -> ^( OPERATOR[$NEAR] ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:348:3: ( NEAR -> ^( OPERATOR[$NEAR] ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:348:4: NEAR + { + NEAR88=(Token)match(input,NEAR,FOLLOW_NEAR_in_near1749); if (state.failed) return retval; + if ( state.backtracking==0 ) stream_NEAR.add(NEAR88); + + + // AST REWRITE + // elements: + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + if ( state.backtracking==0 ) { + + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null); + + root_0 = (Object)adaptor.nil(); + // 348:9: -> ^( OPERATOR[$NEAR] ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:348:12: ^( OPERATOR[$NEAR] ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot( + (Object)adaptor.create(OPERATOR, NEAR88) + , root_1); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + } + + } + + + } + + 
retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "near" + + + public static class date_return extends ParserRuleReturnScope { + Object tree; + public Object getTree() { return tree; } + }; + + + // $ANTLR start "date" + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:351:1: date : DATE_TOKEN ; + public final ExtendedLuceneGrammarParser.date_return date() throws RecognitionException { + ExtendedLuceneGrammarParser.date_return retval = new ExtendedLuceneGrammarParser.date_return(); + retval.start = input.LT(1); + + + Object root_0 = null; + + Token DATE_TOKEN89=null; + + Object DATE_TOKEN89_tree=null; + + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:351:6: ( DATE_TOKEN ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:353:2: DATE_TOKEN + { + root_0 = (Object)adaptor.nil(); + + + DATE_TOKEN89=(Token)match(input,DATE_TOKEN,FOLLOW_DATE_TOKEN_in_date1773); if (state.failed) return retval; + if ( state.backtracking==0 ) { + DATE_TOKEN89_tree = + (Object)adaptor.create(DATE_TOKEN89) + ; + adaptor.addChild(root_0, DATE_TOKEN89_tree); + } + + } + + retval.stop = input.LT(-1); + + + if ( state.backtracking==0 ) { + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + } + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + + } + + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "date" + + // $ANTLR start synpred1_ExtendedLuceneGrammar + public final void synpred1_ExtendedLuceneGrammar_fragment() throws RecognitionException { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:2: ( modifier LPAREN ( clauseOr )+ RPAREN ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:3: modifier LPAREN ( clauseOr )+ RPAREN + { + pushFollow(FOLLOW_modifier_in_synpred1_ExtendedLuceneGrammar415); + modifier(); + + state._fsp--; + if (state.failed) return ; + + match(input,LPAREN,FOLLOW_LPAREN_in_synpred1_ExtendedLuceneGrammar417); if (state.failed) return ; + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:19: ( clauseOr )+ + int cnt41=0; + loop41: + do { + int alt41=2; + int LA41_0 = input.LA(1); + + if ( ((LA41_0 >= LBRACK && LA41_0 <= MINUS)||LA41_0==NUMBER||(LA41_0 >= PHRASE && LA41_0 <= PLUS)||LA41_0==QMARK||LA41_0==STAR||LA41_0==TERM_NORMAL||LA41_0==TERM_TRUNCATED) ) { + alt41=1; + } + + + switch (alt41) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:125:19: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_synpred1_ExtendedLuceneGrammar419); + clauseOr(); + + state._fsp--; + if (state.failed) return ; + + } + break; + + default : + if ( cnt41 >= 1 ) break loop41; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(41, input); + throw eee; + } + cnt41++; + } while (true); + + + 
match(input,RPAREN,FOLLOW_RPAREN_in_synpred1_ExtendedLuceneGrammar422); if (state.failed) return ; + + } + + } + // $ANTLR end synpred1_ExtendedLuceneGrammar + + // $ANTLR start synpred2_ExtendedLuceneGrammar + public final void synpred2_ExtendedLuceneGrammar_fragment() throws RecognitionException { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:4: ( LPAREN ( clauseOr )+ RPAREN term_modifier ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:5: LPAREN ( clauseOr )+ RPAREN term_modifier + { + match(input,LPAREN,FOLLOW_LPAREN_in_synpred2_ExtendedLuceneGrammar476); if (state.failed) return ; + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:12: ( clauseOr )+ + int cnt42=0; + loop42: + do { + int alt42=2; + int LA42_0 = input.LA(1); + + if ( ((LA42_0 >= LBRACK && LA42_0 <= MINUS)||LA42_0==NUMBER||(LA42_0 >= PHRASE && LA42_0 <= PLUS)||LA42_0==QMARK||LA42_0==STAR||LA42_0==TERM_NORMAL||LA42_0==TERM_TRUNCATED) ) { + alt42=1; + } + + + switch (alt42) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:127:12: clauseOr + { + pushFollow(FOLLOW_clauseOr_in_synpred2_ExtendedLuceneGrammar478); + clauseOr(); + + state._fsp--; + if (state.failed) return ; + + } + break; + + default : + if ( cnt42 >= 1 ) break loop42; + if (state.backtracking>0) {state.failed=true; return ;} + EarlyExitException eee = + new EarlyExitException(42, input); + throw eee; + } + cnt42++; + } while (true); + + + match(input,RPAREN,FOLLOW_RPAREN_in_synpred2_ExtendedLuceneGrammar481); if (state.failed) return ; + + pushFollow(FOLLOW_term_modifier_in_synpred2_ExtendedLuceneGrammar483); + term_modifier(); + + state._fsp--; + if (state.failed) return ; + + } + + } + // $ANTLR end synpred2_ExtendedLuceneGrammar + + // $ANTLR start synpred3_ExtendedLuceneGrammar + public final void synpred3_ExtendedLuceneGrammar_fragment() throws RecognitionException { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:129:4: ( LPAREN ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:129:5: LPAREN + { + match(input,LPAREN,FOLLOW_LPAREN_in_synpred3_ExtendedLuceneGrammar536); if (state.failed) return ; + + } + + } + // $ANTLR end synpred3_ExtendedLuceneGrammar + + // $ANTLR start synpred4_ExtendedLuceneGrammar + public final void synpred4_ExtendedLuceneGrammar_fragment() throws RecognitionException { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:335:2: ( AND NOT ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:335:3: AND NOT + { + match(input,AND,FOLLOW_AND_in_synpred4_ExtendedLuceneGrammar1691); if (state.failed) return ; + + match(input,NOT,FOLLOW_NOT_in_synpred4_ExtendedLuceneGrammar1693); if (state.failed) return ; + + } + + } + // $ANTLR end synpred4_ExtendedLuceneGrammar + + // Delegated rules + + public final boolean synpred3_ExtendedLuceneGrammar() { + state.backtracking++; + int start = input.mark(); + try { + synpred3_ExtendedLuceneGrammar_fragment(); // can never throw exception + } catch (RecognitionException re) { + System.err.println("impossible: "+re); + } + boolean success = !state.failed; + input.rewind(start); + state.backtracking--; + state.failed=false; + return success; + } + public final boolean synpred1_ExtendedLuceneGrammar() { + state.backtracking++; + int start = input.mark(); + try { + synpred1_ExtendedLuceneGrammar_fragment(); // can never 
throw exception + } catch (RecognitionException re) { + System.err.println("impossible: "+re); + } + boolean success = !state.failed; + input.rewind(start); + state.backtracking--; + state.failed=false; + return success; + } + public final boolean synpred2_ExtendedLuceneGrammar() { + state.backtracking++; + int start = input.mark(); + try { + synpred2_ExtendedLuceneGrammar_fragment(); // can never throw exception + } catch (RecognitionException re) { + System.err.println("impossible: "+re); + } + boolean success = !state.failed; + input.rewind(start); + state.backtracking--; + state.failed=false; + return success; + } + public final boolean synpred4_ExtendedLuceneGrammar() { + state.backtracking++; + int start = input.mark(); + try { + synpred4_ExtendedLuceneGrammar_fragment(); // can never throw exception + } catch (RecognitionException re) { + System.err.println("impossible: "+re); + } + boolean success = !state.failed; + input.rewind(start); + state.backtracking--; + state.failed=false; + return success; + } + + + + + public static final BitSet FOLLOW_clauseOr_in_mainQ212 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_EOF_in_mainQ215 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_clauseAnd_in_clauseOr242 = new BitSet(new long[]{0x0000000004000002L}); + public static final BitSet FOLLOW_or_in_clauseOr251 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_clauseAnd_in_clauseOr255 = new BitSet(new long[]{0x0000000004000002L}); + public static final BitSet FOLLOW_clauseNot_in_clauseAnd284 = new BitSet(new long[]{0x0000000000000022L}); + public static final BitSet FOLLOW_and_in_clauseAnd294 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_clauseNot_in_clauseAnd298 = new BitSet(new long[]{0x0000000000000022L}); + public static final BitSet FOLLOW_clauseNear_in_clauseNot329 = new BitSet(new long[]{0x0000000000800022L}); + public static final BitSet FOLLOW_not_in_clauseNot338 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_clauseNear_in_clauseNot342 = new BitSet(new long[]{0x0000000000800022L}); + public static final BitSet FOLLOW_clauseBasic_in_clauseNear373 = new BitSet(new long[]{0x0000000000400002L}); + public static final BitSet FOLLOW_near_in_clauseNear382 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_clauseBasic_in_clauseNear386 = new BitSet(new long[]{0x0000000000400002L}); + public static final BitSet FOLLOW_modifier_in_clauseBasic427 = new BitSet(new long[]{0x0000000000080000L}); + public static final BitSet FOLLOW_LPAREN_in_clauseBasic430 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_clauseOr_in_clauseBasic432 = new BitSet(new long[]{0x0000AA01391E0000L}); + public static final BitSet FOLLOW_RPAREN_in_clauseBasic435 = new BitSet(new long[]{0x0001000000000102L}); + public static final BitSet FOLLOW_term_modifier_in_clauseBasic437 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_modifier_in_clauseBasic487 = new BitSet(new long[]{0x0000000000080000L}); + public static final BitSet FOLLOW_LPAREN_in_clauseBasic490 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_clauseOr_in_clauseBasic492 = new BitSet(new long[]{0x0000AA01391E0000L}); + public static final BitSet FOLLOW_RPAREN_in_clauseBasic495 = new BitSet(new long[]{0x0001000000000102L}); + public static final BitSet 
FOLLOW_term_modifier_in_clauseBasic497 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LPAREN_in_clauseBasic542 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_clauseOr_in_clauseBasic544 = new BitSet(new long[]{0x0000AA01391E0000L}); + public static final BitSet FOLLOW_RPAREN_in_clauseBasic547 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_atom_in_clauseBasic559 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_modifier_in_atom580 = new BitSet(new long[]{0x0000200000000000L}); + public static final BitSet FOLLOW_field_in_atom583 = new BitSet(new long[]{0x0000000000080000L}); + public static final BitSet FOLLOW_multi_value_in_atom585 = new BitSet(new long[]{0x0001000000000102L}); + public static final BitSet FOLLOW_term_modifier_in_atom587 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_modifier_in_atom623 = new BitSet(new long[]{0x0000A80119060000L}); + public static final BitSet FOLLOW_field_in_atom626 = new BitSet(new long[]{0x0000A80119060000L}); + public static final BitSet FOLLOW_value_in_atom629 = new BitSet(new long[]{0x0001000000000102L}); + public static final BitSet FOLLOW_term_modifier_in_atom631 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_TERM_NORMAL_in_field678 = new BitSet(new long[]{0x0000000000000400L}); + public static final BitSet FOLLOW_COLON_in_field680 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_range_term_in_in_value699 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_range_term_ex_in_value712 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_normal_in_value726 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_truncated_in_value740 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_quoted_in_value754 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_quoted_truncated_in_value767 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_QMARK_in_value780 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_STAR_in_value793 = new BitSet(new long[]{0x0000000000000400L}); + public static final BitSet FOLLOW_COLON_in_value795 = new BitSet(new long[]{0x0000080000000000L}); + public static final BitSet FOLLOW_STAR_in_value799 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_STAR_in_value814 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LBRACK_in_range_term_in845 = new BitSet(new long[]{0x0000A80019000800L}); + public static final BitSet FOLLOW_range_value_in_range_term_in857 = new BitSet(new long[]{0x0004A88019000800L}); + public static final BitSet FOLLOW_TO_in_range_term_in880 = new BitSet(new long[]{0x0000A80019000800L}); + public static final BitSet FOLLOW_range_value_in_range_term_in885 = new BitSet(new long[]{0x0000008000000000L}); + public static final BitSet FOLLOW_RBRACK_in_range_term_in906 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LCURLY_in_range_term_ex926 = new BitSet(new long[]{0x0000A80019000800L}); + public static final BitSet FOLLOW_range_value_in_range_term_ex939 = new BitSet(new long[]{0x0004A90019000800L}); + public static final BitSet FOLLOW_TO_in_range_term_ex962 = new BitSet(new 
long[]{0x0000A80019000800L}); + public static final BitSet FOLLOW_range_value_in_range_term_ex967 = new BitSet(new long[]{0x0000010000000000L}); + public static final BitSet FOLLOW_RCURLY_in_range_term_ex988 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_truncated_in_range_value1002 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_quoted_in_range_value1015 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_quoted_truncated_in_range_value1028 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_date_in_range_value1041 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_normal_in_range_value1054 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_STAR_in_range_value1068 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LPAREN_in_multi_value1089 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_multiClause_in_multi_value1091 = new BitSet(new long[]{0x0000020000000000L}); + public static final BitSet FOLLOW_RPAREN_in_multi_value1093 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_clauseOr_in_multiClause1120 = new BitSet(new long[]{0x0000A801391E0002L}); + public static final BitSet FOLLOW_multiOr_in_multiDefault1164 = new BitSet(new long[]{0x0000A80139160002L}); + public static final BitSet FOLLOW_multiAnd_in_multiOr1192 = new BitSet(new long[]{0x0000000004000002L}); + public static final BitSet FOLLOW_or_in_multiOr1202 = new BitSet(new long[]{0x0000A80139160000L}); + public static final BitSet FOLLOW_multiAnd_in_multiOr1206 = new BitSet(new long[]{0x0000000004000002L}); + public static final BitSet FOLLOW_multiNot_in_multiAnd1237 = new BitSet(new long[]{0x0000000000000022L}); + public static final BitSet FOLLOW_and_in_multiAnd1247 = new BitSet(new long[]{0x0000A80139160000L}); + public static final BitSet FOLLOW_multiNot_in_multiAnd1251 = new BitSet(new long[]{0x0000000000000022L}); + public static final BitSet FOLLOW_multiNear_in_multiNot1282 = new BitSet(new long[]{0x0000000000800022L}); + public static final BitSet FOLLOW_not_in_multiNot1292 = new BitSet(new long[]{0x0000A80139160000L}); + public static final BitSet FOLLOW_multiNear_in_multiNot1296 = new BitSet(new long[]{0x0000000000800022L}); + public static final BitSet FOLLOW_multiBasic_in_multiNear1326 = new BitSet(new long[]{0x0000000000400002L}); + public static final BitSet FOLLOW_near_in_multiNear1336 = new BitSet(new long[]{0x0000A80139160000L}); + public static final BitSet FOLLOW_multiBasic_in_multiNear1340 = new BitSet(new long[]{0x0000000000400002L}); + public static final BitSet FOLLOW_mterm_in_multiBasic1366 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_modifier_in_mterm1382 = new BitSet(new long[]{0x0000A80119060000L}); + public static final BitSet FOLLOW_value_in_mterm1385 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_TERM_TRUNCATED_in_truncated1438 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_PHRASE_ANYTHING_in_quoted_truncated1453 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_PHRASE_in_quoted1465 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_AND_in_operator1481 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet 
FOLLOW_OR_in_operator1491 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_NOT_in_operator1501 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_NEAR_in_operator1511 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_PLUS_in_modifier1528 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_MINUS_in_modifier1538 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_TILDE_in_term_modifier1556 = new BitSet(new long[]{0x0000000000000102L}); + public static final BitSet FOLLOW_CARAT_in_term_modifier1558 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_CARAT_in_term_modifier1580 = new BitSet(new long[]{0x0001000000000002L}); + public static final BitSet FOLLOW_TILDE_in_term_modifier1582 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_CARAT_in_boost1614 = new BitSet(new long[]{0x0000000001000002L}); + public static final BitSet FOLLOW_NUMBER_in_boost1629 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_TILDE_in_fuzzy1652 = new BitSet(new long[]{0x0000000001000002L}); + public static final BitSet FOLLOW_NUMBER_in_fuzzy1667 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_AND_in_not1697 = new BitSet(new long[]{0x0000000000800000L}); + public static final BitSet FOLLOW_NOT_in_not1699 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_NOT_in_not1704 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_AND_in_and1718 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_OR_in_or1732 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_NEAR_in_near1749 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_DATE_TOKEN_in_date1773 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_modifier_in_synpred1_ExtendedLuceneGrammar415 = new BitSet(new long[]{0x0000000000080000L}); + public static final BitSet FOLLOW_LPAREN_in_synpred1_ExtendedLuceneGrammar417 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_clauseOr_in_synpred1_ExtendedLuceneGrammar419 = new BitSet(new long[]{0x0000AA01391E0000L}); + public static final BitSet FOLLOW_RPAREN_in_synpred1_ExtendedLuceneGrammar422 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LPAREN_in_synpred2_ExtendedLuceneGrammar476 = new BitSet(new long[]{0x0000A801391E0000L}); + public static final BitSet FOLLOW_clauseOr_in_synpred2_ExtendedLuceneGrammar478 = new BitSet(new long[]{0x0000AA01391E0000L}); + public static final BitSet FOLLOW_RPAREN_in_synpred2_ExtendedLuceneGrammar481 = new BitSet(new long[]{0x0001000000000100L}); + public static final BitSet FOLLOW_term_modifier_in_synpred2_ExtendedLuceneGrammar483 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_LPAREN_in_synpred3_ExtendedLuceneGrammar536 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_AND_in_synpred4_ExtendedLuceneGrammar1691 = new BitSet(new long[]{0x0000000000800000L}); + public static final BitSet FOLLOW_NOT_in_synpred4_ExtendedLuceneGrammar1693 = new BitSet(new long[]{0x0000000000000002L}); + +} \ No newline at end of file Index: 
lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardQueryNodeProcessorPipeline.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardQueryNodeProcessorPipeline.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardQueryNodeProcessorPipeline.java (revision 0) @@ -0,0 +1,141 @@ +package org.apache.lucene.queryparser.flexible.aqp.parser; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.processors.NoChildOptimizationQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorPipeline; +import org.apache.lucene.queryparser.flexible.core.processors.RemoveDeletedQueryNodesProcessor; +import org.apache.lucene.queryparser.flexible.standard.builders.StandardQueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.standard.parser.StandardSyntaxParser; +import org.apache.lucene.queryparser.flexible.standard.processors.AllowLeadingWildcardProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.AnalyzerQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.BooleanSingleChildOptimizationQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.BoostQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.DefaultPhraseSlopQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.FuzzyQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.LowercaseExpandedTermsQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.MatchAllDocsQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.MultiFieldQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.MultiTermRewriteMethodProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.NumericQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.NumericRangeQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.OpenRangeQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.PhraseSlopQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.RemoveEmptyNonLeafQueryNodeProcessor; +import 
org.apache.lucene.queryparser.flexible.standard.processors.TermRangeQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.WildcardQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpBOOSTProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpCLAUSEProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpDEFOPProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpFIELDProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpFUZZYProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpFuzzyModifierProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpGroupQueryOptimizerProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpMODIFIERProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpOPERATORProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpOptimizationProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpQANYTHINGProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpQNORMALProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpQPHRASEProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpQPHRASETRUNCProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpQRANGEEXProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpQRANGEINProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpQTRUNCATEDProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpTMODIFIERProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpTreeRewriteProcessor; +import org.apache.lucene.search.Query; + +/** + * This is based on the standard/processors + * + * This pipeline has all the processors needed to process a query node tree, + * generated by {@link StandardSyntaxParser}, already assembled.
+ *
+ * The order in which the processors are assembled affects the results.
+ *
+ * This processor pipeline was designed to work with + * {@link StandardQueryConfigHandler}.
+ *
+ * The resulting query node tree can be used to build a {@link Query} object using + * {@link StandardQueryTreeBuilder}, as sketched below.
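+ *
+ * A minimal usage sketch (illustrative only, not part of the original patch;
+ * the AQP syntax parser wiring is an assumption about this module's API):
+ *
+ * <pre>
+ * QueryConfigHandler config = new StandardQueryConfigHandler();
+ * AqpStandardQueryNodeProcessorPipeline pipeline =
+ *     new AqpStandardQueryNodeProcessorPipeline(config);
+ *
+ * // parse the raw query into a query node tree (hypothetical wiring)
+ * QueryNode ast = new ExtendedLuceneGrammarSyntaxParser().parse("title:foo AND bar", "all");
+ *
+ * // run the processors in order, then build a Lucene Query from the processed tree
+ * QueryNode processed = pipeline.process(ast);
+ * Query query = new StandardQueryTreeBuilder().build(processed);
+ * </pre>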
+ * + * @see StandardQueryTreeBuilder + * @see StandardQueryConfigHandler + * @see StandardSyntaxParser + */ +public class AqpStandardQueryNodeProcessorPipeline extends + QueryNodeProcessorPipeline { + + public AqpStandardQueryNodeProcessorPipeline(QueryConfigHandler queryConfig) { + super(queryConfig); + + add(new AqpDEFOPProcessor()); + add(new AqpTreeRewriteProcessor()); + + add(new AqpCLAUSEProcessor()); + add(new AqpMODIFIERProcessor()); + + add(new AqpOPERATORProcessor()); + add(new AqpTMODIFIERProcessor()); + add(new AqpBOOSTProcessor()); + add(new AqpFUZZYProcessor()); + + add(new AqpQRANGEINProcessor()); + add(new AqpQRANGEEXProcessor()); + add(new AqpQNORMALProcessor()); + add(new AqpQPHRASEProcessor()); + add(new AqpQPHRASETRUNCProcessor()); + add(new AqpQTRUNCATEDProcessor()); + add(new AqpQRANGEINProcessor()); + add(new AqpQRANGEEXProcessor()); + add(new AqpQANYTHINGProcessor()); + add(new AqpFIELDProcessor()); + + add(new AqpFuzzyModifierProcessor()); + + // TODO: remove the processors which are not needed + // these were the standard guys before AQP ones were added + + add(new WildcardQueryNodeProcessor()); + add(new MultiFieldQueryNodeProcessor()); + add(new FuzzyQueryNodeProcessor()); + add(new MatchAllDocsQueryNodeProcessor()); + add(new OpenRangeQueryNodeProcessor()); + add(new NumericQueryNodeProcessor()); + add(new NumericRangeQueryNodeProcessor()); + add(new LowercaseExpandedTermsQueryNodeProcessor()); + add(new TermRangeQueryNodeProcessor()); + add(new AllowLeadingWildcardProcessor()); + add(new AnalyzerQueryNodeProcessor()); + add(new PhraseSlopQueryNodeProcessor()); + // add(new GroupQueryNodeProcessor()); + add(new NoChildOptimizationQueryNodeProcessor()); + add(new RemoveDeletedQueryNodesProcessor()); + add(new RemoveEmptyNonLeafQueryNodeProcessor()); + add(new BooleanSingleChildOptimizationQueryNodeProcessor()); + add(new DefaultPhraseSlopQueryNodeProcessor()); + add(new BoostQueryNodeProcessor()); + add(new MultiTermRewriteMethodProcessor()); + + add(new AqpOptimizationProcessor()); + add(new AqpGroupQueryOptimizerProcessor()); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammar.tokens =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammar.tokens (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammar.tokens (revision 0) @@ -0,0 +1,48 @@ +AMPER=4 +AND=5 +ATOM=6 +BOOST=7 +CARAT=8 +CLAUSE=9 +COLON=10 +DATE_TOKEN=11 +DQUOTE=12 +ESC_CHAR=13 +FIELD=14 +FUZZY=15 +INT=16 +LBRACK=17 +LCURLY=18 +LPAREN=19 +MINUS=20 +MODIFIER=21 +NOT=22 +NUMBER=23 +OPERATOR=24 +OR=25 +PHRASE=26 +PHRASE_ANYTHING=27 +PLUS=28 +QANYTHING=29 +QDATE=30 +QMARK=31 +QNORMAL=32 +QPHRASE=33 +QPHRASETRUNC=34 +QRANGEEX=35 +QRANGEIN=36 +QTRUNCATED=37 +RBRACK=38 +RCURLY=39 +RPAREN=40 +SQUOTE=41 +STAR=42 +TERM_CHAR=43 +TERM_NORMAL=44 +TERM_START_CHAR=45 +TERM_TRUNCATED=46 +TILDE=47 +TMODIFIER=48 +TO=49 +VBAR=50 +WS=51 Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammar.tokens =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammar.tokens (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammar.tokens (revision 0) @@ -0,0 +1,49 @@ +AMPER=4 
+AND=5 +ATOM=6 +BOOST=7 +CARAT=8 +CLAUSE=9 +COLON=10 +DATE_TOKEN=11 +DQUOTE=12 +ESC_CHAR=13 +FIELD=14 +FUZZY=15 +INT=16 +LBRACK=17 +LCURLY=18 +LPAREN=19 +MINUS=20 +MODIFIER=21 +NEAR=22 +NOT=23 +NUMBER=24 +OPERATOR=25 +OR=26 +PHRASE=27 +PHRASE_ANYTHING=28 +PLUS=29 +QANYTHING=30 +QDATE=31 +QMARK=32 +QNORMAL=33 +QPHRASE=34 +QPHRASETRUNC=35 +QRANGEEX=36 +QRANGEIN=37 +QTRUNCATED=38 +RBRACK=39 +RCURLY=40 +RPAREN=41 +SQUOTE=42 +STAR=43 +TERM_CHAR=44 +TERM_NORMAL=45 +TERM_START_CHAR=46 +TERM_TRUNCATED=47 +TILDE=48 +TMODIFIER=49 +TO=50 +VBAR=51 +WS=52 Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammarSyntaxParser.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammarSyntaxParser.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammarSyntaxParser.java (revision 0) @@ -0,0 +1,85 @@ + + +package org.apache.lucene.queryparser.flexible.aqp.parser; + + +/** + * Automatically generated SyntaxParser wrapper built by ant + * from the grammar source: /dvt/workspace/lucene_4x/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammar.g + * + * YOUR CHANGES WILL BE OVERWRITTEN BY NEXT ANT RUN! + */ + +import org.antlr.runtime.ANTLRStringStream; +import org.antlr.runtime.CommonTokenStream; +import org.antlr.runtime.RecognitionException; +import org.antlr.runtime.TokenStream; +import org.apache.lucene.queryparser.flexible.messages.Message; +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpCommonTree; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpCommonTreeAdaptor; +import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; + +import org.apache.lucene.queryparser.flexible.aqp.AqpSyntaxParser; +import org.apache.lucene.queryparser.flexible.aqp.AqpSyntaxParserAbstract; +import org.apache.lucene.queryparser.flexible.aqp.parser.StandardLuceneGrammarLexer; +import org.apache.lucene.queryparser.flexible.aqp.parser.StandardLuceneGrammarParser; + + +public class StandardLuceneGrammarSyntaxParser extends AqpSyntaxParserAbstract { + public AqpSyntaxParser initializeGrammar(String grammarName) + throws QueryNodeParseException { + return this; + } + + public TokenStream getTokenStream(CharSequence input) { + ANTLRStringStream in = new ANTLRStringStream(input.toString()); + StandardLuceneGrammarLexer lexer = new StandardLuceneGrammarLexer(in); + CommonTokenStream tokens = new CommonTokenStream(lexer); + return tokens; + } + + public QueryNode parseTokenStream(TokenStream tokens, CharSequence query, + CharSequence field) throws QueryNodeParseException { + StandardLuceneGrammarParser parser = new StandardLuceneGrammarParser(tokens); + StandardLuceneGrammarParser.mainQ_return returnValue; + + AqpCommonTreeAdaptor adaptor = new AqpCommonTreeAdaptor(parser.getTokenNames()); + parser.setTreeAdaptor(adaptor); + + AqpCommonTree astTree; + + try { + returnValue = parser.mainQ(); + // this prevents parser from recovering, however it can also interfere + // with custom error handling (if present inside the grammar) + if (parser.getNumberOfSyntaxErrors() > 0) { + throw new Exception("The parser reported 
a syntax error, antlrqueryparser hates errors!"); + } + astTree = (AqpCommonTree) returnValue.getTree(); + return astTree.toQueryNodeTree(); + } catch (RecognitionException e) { + throw new QueryNodeParseException(new MessageImpl(query + " " + parser.getErrorMessage(e, parser.getTokenNames()))); + } catch (Exception e) { + Message message = new MessageImpl( + QueryParserMessages.INVALID_SYNTAX_CANNOT_PARSE, query, + e.getMessage()); + QueryNodeParseException ee = new QueryNodeParseException(e); + ee.setQuery(query); + ee.setNonLocalizedMessage(message); + throw ee; + } catch (Error e) { + Message message = new MessageImpl( + QueryParserMessages.INVALID_SYNTAX_CANNOT_PARSE, query, + e.getMessage()); + QueryNodeParseException ee = new QueryNodeParseException(e); + ee.setQuery(query); + ee.setNonLocalizedMessage(message); + throw ee; + } + } +} + + \ No newline at end of file Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammarSyntaxParser.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammarSyntaxParser.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammarSyntaxParser.java (revision 0) @@ -0,0 +1,85 @@ + + +package org.apache.lucene.queryparser.flexible.aqp.parser; + + +/** + * Automatically generated SyntaxParser wrapper built by ant + * from the grammar source: /dvt/workspace/lucene_4x/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammar.g + * + * YOUR CHANGES WILL BE OVERWRITTEN BY NEXT ANT RUN! + */ + +import org.antlr.runtime.ANTLRStringStream; +import org.antlr.runtime.CommonTokenStream; +import org.antlr.runtime.RecognitionException; +import org.antlr.runtime.TokenStream; +import org.apache.lucene.queryparser.flexible.messages.Message; +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpCommonTree; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpCommonTreeAdaptor; +import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; + +import org.apache.lucene.queryparser.flexible.aqp.AqpSyntaxParser; +import org.apache.lucene.queryparser.flexible.aqp.AqpSyntaxParserAbstract; +import org.apache.lucene.queryparser.flexible.aqp.parser.ExtendedLuceneGrammarLexer; +import org.apache.lucene.queryparser.flexible.aqp.parser.ExtendedLuceneGrammarParser; + + +public class ExtendedLuceneGrammarSyntaxParser extends AqpSyntaxParserAbstract { + public AqpSyntaxParser initializeGrammar(String grammarName) + throws QueryNodeParseException { + return this; + } + + public TokenStream getTokenStream(CharSequence input) { + ANTLRStringStream in = new ANTLRStringStream(input.toString()); + ExtendedLuceneGrammarLexer lexer = new ExtendedLuceneGrammarLexer(in); + CommonTokenStream tokens = new CommonTokenStream(lexer); + return tokens; + } + + public QueryNode parseTokenStream(TokenStream tokens, CharSequence query, + CharSequence field) throws QueryNodeParseException { + ExtendedLuceneGrammarParser parser = new ExtendedLuceneGrammarParser(tokens); + ExtendedLuceneGrammarParser.mainQ_return returnValue; + + AqpCommonTreeAdaptor adaptor = new 
AqpCommonTreeAdaptor(parser.getTokenNames()); + parser.setTreeAdaptor(adaptor); + + AqpCommonTree astTree; + + try { + returnValue = parser.mainQ(); + // this prevents parser from recovering, however it can also interfere + // with custom error handling (if present inside the grammar) + if (parser.getNumberOfSyntaxErrors() > 0) { + throw new Exception("The parser reported a syntax error, antlrqueryparser hates errors!"); + } + astTree = (AqpCommonTree) returnValue.getTree(); + return astTree.toQueryNodeTree(); + } catch (RecognitionException e) { + throw new QueryNodeParseException(new MessageImpl(query + " " + parser.getErrorMessage(e, parser.getTokenNames()))); + } catch (Exception e) { + Message message = new MessageImpl( + QueryParserMessages.INVALID_SYNTAX_CANNOT_PARSE, query, + e.getMessage()); + QueryNodeParseException ee = new QueryNodeParseException(e); + ee.setQuery(query); + ee.setNonLocalizedMessage(message); + throw ee; + } catch (Error e) { + Message message = new MessageImpl( + QueryParserMessages.INVALID_SYNTAX_CANNOT_PARSE, query, + e.getMessage()); + QueryNodeParseException ee = new QueryNodeParseException(e); + ee.setQuery(query); + ee.setNonLocalizedMessage(message); + throw ee; + } + } +} + + \ No newline at end of file Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammarLexer.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammarLexer.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/StandardLuceneGrammarLexer.java (revision 0) @@ -0,0 +1,3952 @@ +// $ANTLR 3.4 /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g 2013-06-27 18:55:23 + + package org.apache.lucene.queryparser.flexible.aqp.parser; + + +import org.antlr.runtime.*; +import java.util.Stack; +import java.util.List; +import java.util.ArrayList; + +@SuppressWarnings({"all", "warnings", "unchecked"}) +public class StandardLuceneGrammarLexer extends Lexer { + public static final int EOF=-1; + public static final int AMPER=4; + public static final int AND=5; + public static final int ATOM=6; + public static final int BOOST=7; + public static final int CARAT=8; + public static final int CLAUSE=9; + public static final int COLON=10; + public static final int DATE_TOKEN=11; + public static final int DQUOTE=12; + public static final int ESC_CHAR=13; + public static final int FIELD=14; + public static final int FUZZY=15; + public static final int INT=16; + public static final int LBRACK=17; + public static final int LCURLY=18; + public static final int LPAREN=19; + public static final int MINUS=20; + public static final int MODIFIER=21; + public static final int NOT=22; + public static final int NUMBER=23; + public static final int OPERATOR=24; + public static final int OR=25; + public static final int PHRASE=26; + public static final int PHRASE_ANYTHING=27; + public static final int PLUS=28; + public static final int QANYTHING=29; + public static final int QDATE=30; + public static final int QMARK=31; + public static final int QNORMAL=32; + public static final int QPHRASE=33; + public static final int QPHRASETRUNC=34; + public static final int QRANGEEX=35; + public static final int QRANGEIN=36; + public static final int QTRUNCATED=37; + public static final int RBRACK=38; + public static final int RCURLY=39; + public static final int RPAREN=40; + public static final int 
SQUOTE=41; + public static final int STAR=42; + public static final int TERM_CHAR=43; + public static final int TERM_NORMAL=44; + public static final int TERM_START_CHAR=45; + public static final int TERM_TRUNCATED=46; + public static final int TILDE=47; + public static final int TMODIFIER=48; + public static final int TO=49; + public static final int VBAR=50; + public static final int WS=51; + + public void recover(RecognitionException re) { + // throw unchecked exception + throw new RuntimeException(re); + } + + + // delegates + // delegators + public Lexer[] getDelegates() { + return new Lexer[] {}; + } + + public StandardLuceneGrammarLexer() {} + public StandardLuceneGrammarLexer(CharStream input) { + this(input, new RecognizerSharedState()); + } + public StandardLuceneGrammarLexer(CharStream input, RecognizerSharedState state) { + super(input,state); + } + public String getGrammarFileName() { return "/dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g"; } + + // $ANTLR start "LPAREN" + public final void mLPAREN() throws RecognitionException { + try { + int _type = LPAREN; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:353:9: ( '(' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:353:11: '(' + { + match('('); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "LPAREN" + + // $ANTLR start "RPAREN" + public final void mRPAREN() throws RecognitionException { + try { + int _type = RPAREN; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:355:9: ( ')' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:355:11: ')' + { + match(')'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "RPAREN" + + // $ANTLR start "LBRACK" + public final void mLBRACK() throws RecognitionException { + try { + int _type = LBRACK; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:357:9: ( '[' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:357:11: '[' + { + match('['); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "LBRACK" + + // $ANTLR start "RBRACK" + public final void mRBRACK() throws RecognitionException { + try { + int _type = RBRACK; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:359:9: ( ']' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:359:11: ']' + { + match(']'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "RBRACK" + + // $ANTLR start "COLON" + public final void mCOLON() throws RecognitionException { + try { + int _type = COLON; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:361:9: ( ':' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:361:11: ':' + { + match(':'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "COLON" + + // $ANTLR start 
"PLUS" + public final void mPLUS() throws RecognitionException { + try { + int _type = PLUS; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:363:7: ( '+' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:363:9: '+' + { + match('+'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "PLUS" + + // $ANTLR start "MINUS" + public final void mMINUS() throws RecognitionException { + try { + int _type = MINUS; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:365:7: ( ( '-' | '\\!' ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( input.LA(1)=='!'||input.LA(1)=='-' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "MINUS" + + // $ANTLR start "STAR" + public final void mSTAR() throws RecognitionException { + try { + int _type = STAR; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:367:7: ( '*' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:367:9: '*' + { + match('*'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "STAR" + + // $ANTLR start "QMARK" + public final void mQMARK() throws RecognitionException { + try { + int _type = QMARK; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:369:8: ( ( '?' )+ ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:369:10: ( '?' )+ + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:369:10: ( '?' )+ + int cnt1=0; + loop1: + do { + int alt1=2; + int LA1_0 = input.LA(1); + + if ( (LA1_0=='?') ) { + alt1=1; + } + + + switch (alt1) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:369:10: '?' 
+ { + match('?'); + + } + break; + + default : + if ( cnt1 >= 1 ) break loop1; + EarlyExitException eee = + new EarlyExitException(1, input); + throw eee; + } + cnt1++; + } while (true); + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "QMARK" + + // $ANTLR start "VBAR" + public final void mVBAR() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:371:16: ( '|' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:371:18: '|' + { + match('|'); + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "VBAR" + + // $ANTLR start "AMPER" + public final void mAMPER() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:373:16: ( '&' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:373:18: '&' + { + match('&'); + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "AMPER" + + // $ANTLR start "LCURLY" + public final void mLCURLY() throws RecognitionException { + try { + int _type = LCURLY; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:375:9: ( '{' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:375:11: '{' + { + match('{'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "LCURLY" + + // $ANTLR start "RCURLY" + public final void mRCURLY() throws RecognitionException { + try { + int _type = RCURLY; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:377:9: ( '}' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:377:11: '}' + { + match('}'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "RCURLY" + + // $ANTLR start "CARAT" + public final void mCARAT() throws RecognitionException { + try { + int _type = CARAT; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:379:7: ( '^' ( ( INT )+ ( '.' ( INT )+ )? )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:379:9: '^' ( ( INT )+ ( '.' ( INT )+ )? )? + { + match('^'); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:379:13: ( ( INT )+ ( '.' ( INT )+ )? )? + int alt5=2; + int LA5_0 = input.LA(1); + + if ( ((LA5_0 >= '0' && LA5_0 <= '9')) ) { + alt5=1; + } + switch (alt5) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:379:14: ( INT )+ ( '.' ( INT )+ )? 
+ { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:379:14: ( INT )+ + int cnt2=0; + loop2: + do { + int alt2=2; + int LA2_0 = input.LA(1); + + if ( ((LA2_0 >= '0' && LA2_0 <= '9')) ) { + alt2=1; + } + + + switch (alt2) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt2 >= 1 ) break loop2; + EarlyExitException eee = + new EarlyExitException(2, input); + throw eee; + } + cnt2++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:379:19: ( '.' ( INT )+ )? + int alt4=2; + int LA4_0 = input.LA(1); + + if ( (LA4_0=='.') ) { + alt4=1; + } + switch (alt4) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:379:20: '.' ( INT )+ + { + match('.'); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:379:24: ( INT )+ + int cnt3=0; + loop3: + do { + int alt3=2; + int LA3_0 = input.LA(1); + + if ( ((LA3_0 >= '0' && LA3_0 <= '9')) ) { + alt3=1; + } + + + switch (alt3) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt3 >= 1 ) break loop3; + EarlyExitException eee = + new EarlyExitException(3, input); + throw eee; + } + cnt3++; + } while (true); + + + } + break; + + } + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "CARAT" + + // $ANTLR start "TILDE" + public final void mTILDE() throws RecognitionException { + try { + int _type = TILDE; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:381:7: ( '~' ( ( INT )+ ( '.' ( INT )+ )? )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:381:9: '~' ( ( INT )+ ( '.' ( INT )+ )? )? + { + match('~'); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:381:13: ( ( INT )+ ( '.' ( INT )+ )? )? + int alt9=2; + int LA9_0 = input.LA(1); + + if ( ((LA9_0 >= '0' && LA9_0 <= '9')) ) { + alt9=1; + } + switch (alt9) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:381:14: ( INT )+ ( '.' ( INT )+ )? 
+ { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:381:14: ( INT )+ + int cnt6=0; + loop6: + do { + int alt6=2; + int LA6_0 = input.LA(1); + + if ( ((LA6_0 >= '0' && LA6_0 <= '9')) ) { + alt6=1; + } + + + switch (alt6) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt6 >= 1 ) break loop6; + EarlyExitException eee = + new EarlyExitException(6, input); + throw eee; + } + cnt6++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:381:19: ( '.' ( INT )+ )? + int alt8=2; + int LA8_0 = input.LA(1); + + if ( (LA8_0=='.') ) { + alt8=1; + } + switch (alt8) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:381:20: '.' ( INT )+ + { + match('.'); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:381:24: ( INT )+ + int cnt7=0; + loop7: + do { + int alt7=2; + int LA7_0 = input.LA(1); + + if ( ((LA7_0 >= '0' && LA7_0 <= '9')) ) { + alt7=1; + } + + + switch (alt7) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt7 >= 1 ) break loop7; + EarlyExitException eee = + new EarlyExitException(7, input); + throw eee; + } + cnt7++; + } while (true); + + + } + break; + + } + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TILDE" + + // $ANTLR start "DQUOTE" + public final void mDQUOTE() throws RecognitionException { + try { + int _type = DQUOTE; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:384:2: ( '\\\"' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:384:4: '\\\"' + { + match('\"'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "DQUOTE" + + // $ANTLR start "SQUOTE" + public final void mSQUOTE() throws RecognitionException { + try { + int _type = SQUOTE; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:387:2: ( '\\'' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:387:4: '\\'' + { + match('\''); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "SQUOTE" + + // $ANTLR start "TO" + public final void mTO() throws RecognitionException { + try { + int _type = TO; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:392:4: ( 'TO' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:392:6: 'TO' + { + match("TO"); + + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TO" + + // $ANTLR start "AND" + public final void mAND() throws 
RecognitionException { + try { + int _type = AND; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:395:7: ( ( ( 'a' | 'A' ) ( 'n' | 'N' ) ( 'd' | 'D' ) | ( AMPER ( AMPER )? ) ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:395:9: ( ( 'a' | 'A' ) ( 'n' | 'N' ) ( 'd' | 'D' ) | ( AMPER ( AMPER )? ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:395:9: ( ( 'a' | 'A' ) ( 'n' | 'N' ) ( 'd' | 'D' ) | ( AMPER ( AMPER )? ) ) + int alt11=2; + int LA11_0 = input.LA(1); + + if ( (LA11_0=='A'||LA11_0=='a') ) { + alt11=1; + } + else if ( (LA11_0=='&') ) { + alt11=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 11, 0, input); + + throw nvae; + + } + switch (alt11) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:395:10: ( 'a' | 'A' ) ( 'n' | 'N' ) ( 'd' | 'D' ) + { + if ( input.LA(1)=='A'||input.LA(1)=='a' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='N'||input.LA(1)=='n' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='D'||input.LA(1)=='d' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:395:48: ( AMPER ( AMPER )? ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:395:48: ( AMPER ( AMPER )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:395:49: AMPER ( AMPER )? + { + mAMPER(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:395:55: ( AMPER )? + int alt10=2; + int LA10_0 = input.LA(1); + + if ( (LA10_0=='&') ) { + alt10=1; + } + switch (alt10) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( input.LA(1)=='&' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + } + + + } + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "AND" + + // $ANTLR start "OR" + public final void mOR() throws RecognitionException { + try { + int _type = OR; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:396:5: ( ( ( 'o' | 'O' ) ( 'r' | 'R' ) | ( VBAR ( VBAR )? ) ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:396:7: ( ( 'o' | 'O' ) ( 'r' | 'R' ) | ( VBAR ( VBAR )? ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:396:7: ( ( 'o' | 'O' ) ( 'r' | 'R' ) | ( VBAR ( VBAR )? 
) ) + int alt13=2; + int LA13_0 = input.LA(1); + + if ( (LA13_0=='O'||LA13_0=='o') ) { + alt13=1; + } + else if ( (LA13_0=='|') ) { + alt13=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 13, 0, input); + + throw nvae; + + } + switch (alt13) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:396:8: ( 'o' | 'O' ) ( 'r' | 'R' ) + { + if ( input.LA(1)=='O'||input.LA(1)=='o' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='R'||input.LA(1)=='r' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:396:34: ( VBAR ( VBAR )? ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:396:34: ( VBAR ( VBAR )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:396:35: VBAR ( VBAR )? + { + mVBAR(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:396:40: ( VBAR )? + int alt12=2; + int LA12_0 = input.LA(1); + + if ( (LA12_0=='|') ) { + alt12=1; + } + switch (alt12) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( input.LA(1)=='|' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + } + + + } + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "OR" + + // $ANTLR start "NOT" + public final void mNOT() throws RecognitionException { + try { + int _type = NOT; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:397:7: ( ( 'n' | 'N' ) ( 'o' | 'O' ) ( 't' | 'T' ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:397:9: ( 'n' | 'N' ) ( 'o' | 'O' ) ( 't' | 'T' ) + { + if ( input.LA(1)=='N'||input.LA(1)=='n' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='O'||input.LA(1)=='o' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='T'||input.LA(1)=='t' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "NOT" + + // $ANTLR start "WS" + public final void mWS() throws RecognitionException { + try { + int _type = WS; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:400:5: ( ( ' ' | '\\t' | '\\r' | '\\n' | '\\u3000' ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:400:9: ( ' ' | '\\t' | '\\r' | '\\n' | '\\u3000' ) + { + if ( (input.LA(1) >= '\t' && input.LA(1) <= '\n')||input.LA(1)=='\r'||input.LA(1)==' '||input.LA(1)=='\u3000' ) { + input.consume(); + } + else { + MismatchedSetException mse = new 
MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + _channel=HIDDEN; + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "WS" + + // $ANTLR start "INT" + public final void mINT() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:421:13: ( '0' .. '9' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "INT" + + // $ANTLR start "ESC_CHAR" + public final void mESC_CHAR() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:424:18: ( '\\\\' . ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:424:21: '\\\\' . + { + match('\\'); + + matchAny(); + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "ESC_CHAR" + + // $ANTLR start "TERM_START_CHAR" + public final void mTERM_START_CHAR() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:428:2: ( (~ ( ' ' | '\\t' | '\\n' | '\\r' | '\\u3000' | '\\'' | '\\\"' | '(' | ')' | '[' | ']' | '{' | '}' | '+' | '-' | '!' | ':' | '~' | '^' | '?' | '*' | '\\\\' ) | ESC_CHAR ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:429:2: (~ ( ' ' | '\\t' | '\\n' | '\\r' | '\\u3000' | '\\'' | '\\\"' | '(' | ')' | '[' | ']' | '{' | '}' | '+' | '-' | '!' | ':' | '~' | '^' | '?' | '*' | '\\\\' ) | ESC_CHAR ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:429:2: (~ ( ' ' | '\\t' | '\\n' | '\\r' | '\\u3000' | '\\'' | '\\\"' | '(' | ')' | '[' | ']' | '{' | '}' | '+' | '-' | '!' | ':' | '~' | '^' | '?' | '*' | '\\\\' ) | ESC_CHAR ) + int alt14=2; + int LA14_0 = input.LA(1); + + if ( ((LA14_0 >= '\u0000' && LA14_0 <= '\b')||(LA14_0 >= '\u000B' && LA14_0 <= '\f')||(LA14_0 >= '\u000E' && LA14_0 <= '\u001F')||(LA14_0 >= '#' && LA14_0 <= '&')||LA14_0==','||(LA14_0 >= '.' && LA14_0 <= '9')||(LA14_0 >= ';' && LA14_0 <= '>')||(LA14_0 >= '@' && LA14_0 <= 'Z')||(LA14_0 >= '_' && LA14_0 <= 'z')||LA14_0=='|'||(LA14_0 >= '\u007F' && LA14_0 <= '\u2FFF')||(LA14_0 >= '\u3001' && LA14_0 <= '\uFFFF')) ) { + alt14=1; + } + else if ( (LA14_0=='\\') ) { + alt14=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 14, 0, input); + + throw nvae; + + } + switch (alt14) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:429:3: ~ ( ' ' | '\\t' | '\\n' | '\\r' | '\\u3000' | '\\'' | '\\\"' | '(' | ')' | '[' | ']' | '{' | '}' | '+' | '-' | '!' | ':' | '~' | '^' | '?' | '*' | '\\\\' ) + { + if ( (input.LA(1) >= '\u0000' && input.LA(1) <= '\b')||(input.LA(1) >= '\u000B' && input.LA(1) <= '\f')||(input.LA(1) >= '\u000E' && input.LA(1) <= '\u001F')||(input.LA(1) >= '#' && input.LA(1) <= '&')||input.LA(1)==','||(input.LA(1) >= '.' 
&& input.LA(1) <= '9')||(input.LA(1) >= ';' && input.LA(1) <= '>')||(input.LA(1) >= '@' && input.LA(1) <= 'Z')||(input.LA(1) >= '_' && input.LA(1) <= 'z')||input.LA(1)=='|'||(input.LA(1) >= '\u007F' && input.LA(1) <= '\u2FFF')||(input.LA(1) >= '\u3001' && input.LA(1) <= '\uFFFF') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:435:5: ESC_CHAR + { + mESC_CHAR(); + + + } + break; + + } + + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TERM_START_CHAR" + + // $ANTLR start "TERM_CHAR" + public final void mTERM_CHAR() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:439:2: ( ( TERM_START_CHAR | '-' | '+' ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:440:2: ( TERM_START_CHAR | '-' | '+' ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:440:2: ( TERM_START_CHAR | '-' | '+' ) + int alt15=3; + int LA15_0 = input.LA(1); + + if ( ((LA15_0 >= '\u0000' && LA15_0 <= '\b')||(LA15_0 >= '\u000B' && LA15_0 <= '\f')||(LA15_0 >= '\u000E' && LA15_0 <= '\u001F')||(LA15_0 >= '#' && LA15_0 <= '&')||LA15_0==','||(LA15_0 >= '.' && LA15_0 <= '9')||(LA15_0 >= ';' && LA15_0 <= '>')||(LA15_0 >= '@' && LA15_0 <= 'Z')||LA15_0=='\\'||(LA15_0 >= '_' && LA15_0 <= 'z')||LA15_0=='|'||(LA15_0 >= '\u007F' && LA15_0 <= '\u2FFF')||(LA15_0 >= '\u3001' && LA15_0 <= '\uFFFF')) ) { + alt15=1; + } + else if ( (LA15_0=='-') ) { + alt15=2; + } + else if ( (LA15_0=='+') ) { + alt15=3; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 15, 0, input); + + throw nvae; + + } + switch (alt15) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:440:3: TERM_START_CHAR + { + mTERM_START_CHAR(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:440:21: '-' + { + match('-'); + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:440:27: '+' + { + match('+'); + + } + break; + + } + + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TERM_CHAR" + + // $ANTLR start "NUMBER" + public final void mNUMBER() throws RecognitionException { + try { + int _type = NUMBER; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:445:2: ( ( INT )+ ( '.' ( INT )+ )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:446:2: ( INT )+ ( '.' ( INT )+ )? 
+ { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:446:2: ( INT )+ + int cnt16=0; + loop16: + do { + int alt16=2; + int LA16_0 = input.LA(1); + + if ( ((LA16_0 >= '0' && LA16_0 <= '9')) ) { + alt16=1; + } + + + switch (alt16) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt16 >= 1 ) break loop16; + EarlyExitException eee = + new EarlyExitException(16, input); + throw eee; + } + cnt16++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:446:7: ( '.' ( INT )+ )? + int alt18=2; + int LA18_0 = input.LA(1); + + if ( (LA18_0=='.') ) { + alt18=1; + } + switch (alt18) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:446:8: '.' ( INT )+ + { + match('.'); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:446:12: ( INT )+ + int cnt17=0; + loop17: + do { + int alt17=2; + int LA17_0 = input.LA(1); + + if ( ((LA17_0 >= '0' && LA17_0 <= '9')) ) { + alt17=1; + } + + + switch (alt17) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt17 >= 1 ) break loop17; + EarlyExitException eee = + new EarlyExitException(17, input); + throw eee; + } + cnt17++; + } while (true); + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "NUMBER" + + // $ANTLR start "DATE_TOKEN" + public final void mDATE_TOKEN() throws RecognitionException { + try { + int _type = DATE_TOKEN; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:450:2: ( INT ( INT )? ( '/' | '-' | '.' ) INT ( INT )? ( '/' | '-' | '.' ) INT INT ( INT INT )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:451:2: INT ( INT )? ( '/' | '-' | '.' ) INT ( INT )? ( '/' | '-' | '.' ) INT INT ( INT INT )? + { + mINT(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:451:6: ( INT )? + int alt19=2; + int LA19_0 = input.LA(1); + + if ( ((LA19_0 >= '0' && LA19_0 <= '9')) ) { + alt19=1; + } + switch (alt19) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + } + + + if ( (input.LA(1) >= '-' && input.LA(1) <= '/') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + mINT(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:451:29: ( INT )? 
+ int alt20=2; + int LA20_0 = input.LA(1); + + if ( ((LA20_0 >= '0' && LA20_0 <= '9')) ) { + alt20=1; + } + switch (alt20) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + } + + + if ( (input.LA(1) >= '-' && input.LA(1) <= '/') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + mINT(); + + + mINT(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:451:56: ( INT INT )? + int alt21=2; + int LA21_0 = input.LA(1); + + if ( ((LA21_0 >= '0' && LA21_0 <= '9')) ) { + alt21=1; + } + switch (alt21) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:451:57: INT INT + { + mINT(); + + + mINT(); + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "DATE_TOKEN" + + // $ANTLR start "TERM_NORMAL" + public final void mTERM_NORMAL() throws RecognitionException { + try { + int _type = TERM_NORMAL; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:455:2: ( TERM_START_CHAR ( TERM_CHAR )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:456:2: TERM_START_CHAR ( TERM_CHAR )* + { + mTERM_START_CHAR(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:456:18: ( TERM_CHAR )* + loop22: + do { + int alt22=2; + int LA22_0 = input.LA(1); + + if ( ((LA22_0 >= '\u0000' && LA22_0 <= '\b')||(LA22_0 >= '\u000B' && LA22_0 <= '\f')||(LA22_0 >= '\u000E' && LA22_0 <= '\u001F')||(LA22_0 >= '#' && LA22_0 <= '&')||(LA22_0 >= '+' && LA22_0 <= '9')||(LA22_0 >= ';' && LA22_0 <= '>')||(LA22_0 >= '@' && LA22_0 <= 'Z')||LA22_0=='\\'||(LA22_0 >= '_' && LA22_0 <= 'z')||LA22_0=='|'||(LA22_0 >= '\u007F' && LA22_0 <= '\u2FFF')||(LA22_0 >= '\u3001' && LA22_0 <= '\uFFFF')) ) { + alt22=1; + } + + + switch (alt22) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:456:20: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + break loop22; + } + } while (true); + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TERM_NORMAL" + + // $ANTLR start "TERM_TRUNCATED" + public final void mTERM_TRUNCATED() throws RecognitionException { + try { + int _type = TERM_TRUNCATED; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:460:15: ( ( STAR | QMARK ) ( ( TERM_CHAR )+ ( QMARK | STAR ) )+ ( TERM_CHAR )* | TERM_START_CHAR ( ( TERM_CHAR )* ( QMARK | STAR ) )+ ( TERM_CHAR )* | ( STAR | QMARK ) ( TERM_CHAR )+ ) + int alt34=3; + alt34 = dfa34.predict(input); + switch (alt34) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:2: ( STAR | QMARK ) ( ( TERM_CHAR )+ ( QMARK | STAR ) )+ ( TERM_CHAR )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:2: ( STAR | QMARK ) + int alt23=2; + int LA23_0 = input.LA(1); + + if ( (LA23_0=='*') ) { + alt23=1; + } + else if ( (LA23_0=='?') ) { + alt23=2; + } + else { + 
NoViableAltException nvae = + new NoViableAltException("", 23, 0, input); + + throw nvae; + + } + switch (alt23) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:3: STAR + { + mSTAR(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:8: QMARK + { + mQMARK(); + + + } + break; + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:15: ( ( TERM_CHAR )+ ( QMARK | STAR ) )+ + int cnt26=0; + loop26: + do { + int alt26=2; + alt26 = dfa26.predict(input); + switch (alt26) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:16: ( TERM_CHAR )+ ( QMARK | STAR ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:16: ( TERM_CHAR )+ + int cnt24=0; + loop24: + do { + int alt24=2; + int LA24_0 = input.LA(1); + + if ( ((LA24_0 >= '\u0000' && LA24_0 <= '\b')||(LA24_0 >= '\u000B' && LA24_0 <= '\f')||(LA24_0 >= '\u000E' && LA24_0 <= '\u001F')||(LA24_0 >= '#' && LA24_0 <= '&')||(LA24_0 >= '+' && LA24_0 <= '9')||(LA24_0 >= ';' && LA24_0 <= '>')||(LA24_0 >= '@' && LA24_0 <= 'Z')||LA24_0=='\\'||(LA24_0 >= '_' && LA24_0 <= 'z')||LA24_0=='|'||(LA24_0 >= '\u007F' && LA24_0 <= '\u2FFF')||(LA24_0 >= '\u3001' && LA24_0 <= '\uFFFF')) ) { + alt24=1; + } + + + switch (alt24) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:16: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + if ( cnt24 >= 1 ) break loop24; + EarlyExitException eee = + new EarlyExitException(24, input); + throw eee; + } + cnt24++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:27: ( QMARK | STAR ) + int alt25=2; + int LA25_0 = input.LA(1); + + if ( (LA25_0=='?') ) { + alt25=1; + } + else if ( (LA25_0=='*') ) { + alt25=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 25, 0, input); + + throw nvae; + + } + switch (alt25) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:28: QMARK + { + mQMARK(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:34: STAR + { + mSTAR(); + + + } + break; + + } + + + } + break; + + default : + if ( cnt26 >= 1 ) break loop26; + EarlyExitException eee = + new EarlyExitException(26, input); + throw eee; + } + cnt26++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:42: ( TERM_CHAR )* + loop27: + do { + int alt27=2; + int LA27_0 = input.LA(1); + + if ( ((LA27_0 >= '\u0000' && LA27_0 <= '\b')||(LA27_0 >= '\u000B' && LA27_0 <= '\f')||(LA27_0 >= '\u000E' && LA27_0 <= '\u001F')||(LA27_0 >= '#' && LA27_0 <= '&')||(LA27_0 >= '+' && LA27_0 <= '9')||(LA27_0 >= ';' && LA27_0 <= '>')||(LA27_0 >= '@' && LA27_0 <= 'Z')||LA27_0=='\\'||(LA27_0 >= '_' && LA27_0 <= 'z')||LA27_0=='|'||(LA27_0 >= '\u007F' && LA27_0 <= '\u2FFF')||(LA27_0 >= '\u3001' && LA27_0 <= '\uFFFF')) ) { + alt27=1; + } + + + switch (alt27) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:461:43: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + break loop27; + } + } while (true); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:462:4: TERM_START_CHAR ( ( TERM_CHAR )* ( QMARK | STAR ) )+ ( TERM_CHAR )* + { 
+ mTERM_START_CHAR(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:462:20: ( ( TERM_CHAR )* ( QMARK | STAR ) )+ + int cnt30=0; + loop30: + do { + int alt30=2; + alt30 = dfa30.predict(input); + switch (alt30) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:462:21: ( TERM_CHAR )* ( QMARK | STAR ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:462:21: ( TERM_CHAR )* + loop28: + do { + int alt28=2; + int LA28_0 = input.LA(1); + + if ( ((LA28_0 >= '\u0000' && LA28_0 <= '\b')||(LA28_0 >= '\u000B' && LA28_0 <= '\f')||(LA28_0 >= '\u000E' && LA28_0 <= '\u001F')||(LA28_0 >= '#' && LA28_0 <= '&')||(LA28_0 >= '+' && LA28_0 <= '9')||(LA28_0 >= ';' && LA28_0 <= '>')||(LA28_0 >= '@' && LA28_0 <= 'Z')||LA28_0=='\\'||(LA28_0 >= '_' && LA28_0 <= 'z')||LA28_0=='|'||(LA28_0 >= '\u007F' && LA28_0 <= '\u2FFF')||(LA28_0 >= '\u3001' && LA28_0 <= '\uFFFF')) ) { + alt28=1; + } + + + switch (alt28) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:462:21: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + break loop28; + } + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:462:32: ( QMARK | STAR ) + int alt29=2; + int LA29_0 = input.LA(1); + + if ( (LA29_0=='?') ) { + alt29=1; + } + else if ( (LA29_0=='*') ) { + alt29=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 29, 0, input); + + throw nvae; + + } + switch (alt29) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:462:33: QMARK + { + mQMARK(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:462:39: STAR + { + mSTAR(); + + + } + break; + + } + + + } + break; + + default : + if ( cnt30 >= 1 ) break loop30; + EarlyExitException eee = + new EarlyExitException(30, input); + throw eee; + } + cnt30++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:462:47: ( TERM_CHAR )* + loop31: + do { + int alt31=2; + int LA31_0 = input.LA(1); + + if ( ((LA31_0 >= '\u0000' && LA31_0 <= '\b')||(LA31_0 >= '\u000B' && LA31_0 <= '\f')||(LA31_0 >= '\u000E' && LA31_0 <= '\u001F')||(LA31_0 >= '#' && LA31_0 <= '&')||(LA31_0 >= '+' && LA31_0 <= '9')||(LA31_0 >= ';' && LA31_0 <= '>')||(LA31_0 >= '@' && LA31_0 <= 'Z')||LA31_0=='\\'||(LA31_0 >= '_' && LA31_0 <= 'z')||LA31_0=='|'||(LA31_0 >= '\u007F' && LA31_0 <= '\u2FFF')||(LA31_0 >= '\u3001' && LA31_0 <= '\uFFFF')) ) { + alt31=1; + } + + + switch (alt31) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:462:48: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + break loop31; + } + } while (true); + + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:463:4: ( STAR | QMARK ) ( TERM_CHAR )+ + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:463:4: ( STAR | QMARK ) + int alt32=2; + int LA32_0 = input.LA(1); + + if ( (LA32_0=='*') ) { + alt32=1; + } + else if ( (LA32_0=='?') ) { + alt32=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 32, 0, input); + + throw nvae; + + } + switch (alt32) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:463:5: STAR + { + mSTAR(); + + + } + break; + case 2 : + // 
/dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:463:10: QMARK + { + mQMARK(); + + + } + break; + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:463:17: ( TERM_CHAR )+ + int cnt33=0; + loop33: + do { + int alt33=2; + int LA33_0 = input.LA(1); + + if ( ((LA33_0 >= '\u0000' && LA33_0 <= '\b')||(LA33_0 >= '\u000B' && LA33_0 <= '\f')||(LA33_0 >= '\u000E' && LA33_0 <= '\u001F')||(LA33_0 >= '#' && LA33_0 <= '&')||(LA33_0 >= '+' && LA33_0 <= '9')||(LA33_0 >= ';' && LA33_0 <= '>')||(LA33_0 >= '@' && LA33_0 <= 'Z')||LA33_0=='\\'||(LA33_0 >= '_' && LA33_0 <= 'z')||LA33_0=='|'||(LA33_0 >= '\u007F' && LA33_0 <= '\u2FFF')||(LA33_0 >= '\u3001' && LA33_0 <= '\uFFFF')) ) { + alt33=1; + } + + + switch (alt33) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:463:17: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + if ( cnt33 >= 1 ) break loop33; + EarlyExitException eee = + new EarlyExitException(33, input); + throw eee; + } + cnt33++; + } while (true); + + + } + break; + + } + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TERM_TRUNCATED" + + // $ANTLR start "PHRASE" + public final void mPHRASE() throws RecognitionException { + try { + int _type = PHRASE; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:468:2: ( DQUOTE ( ESC_CHAR |~ ( '\\\"' | '\\\\' | '?' | '*' ) )+ DQUOTE ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:469:2: DQUOTE ( ESC_CHAR |~ ( '\\\"' | '\\\\' | '?' | '*' ) )+ DQUOTE + { + mDQUOTE(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:469:9: ( ESC_CHAR |~ ( '\\\"' | '\\\\' | '?' | '*' ) )+ + int cnt35=0; + loop35: + do { + int alt35=3; + int LA35_0 = input.LA(1); + + if ( (LA35_0=='\\') ) { + alt35=1; + } + else if ( ((LA35_0 >= '\u0000' && LA35_0 <= '!')||(LA35_0 >= '#' && LA35_0 <= ')')||(LA35_0 >= '+' && LA35_0 <= '>')||(LA35_0 >= '@' && LA35_0 <= '[')||(LA35_0 >= ']' && LA35_0 <= '\uFFFF')) ) { + alt35=2; + } + + + switch (alt35) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:469:10: ESC_CHAR + { + mESC_CHAR(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:469:19: ~ ( '\\\"' | '\\\\' | '?' 
| '*' ) + { + if ( (input.LA(1) >= '\u0000' && input.LA(1) <= '!')||(input.LA(1) >= '#' && input.LA(1) <= ')')||(input.LA(1) >= '+' && input.LA(1) <= '>')||(input.LA(1) >= '@' && input.LA(1) <= '[')||(input.LA(1) >= ']' && input.LA(1) <= '\uFFFF') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt35 >= 1 ) break loop35; + EarlyExitException eee = + new EarlyExitException(35, input); + throw eee; + } + cnt35++; + } while (true); + + + mDQUOTE(); + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "PHRASE" + + // $ANTLR start "PHRASE_ANYTHING" + public final void mPHRASE_ANYTHING() throws RecognitionException { + try { + int _type = PHRASE_ANYTHING; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:472:17: ( DQUOTE ( ESC_CHAR |~ ( '\\\"' | '\\\\' ) )+ DQUOTE ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:473:2: DQUOTE ( ESC_CHAR |~ ( '\\\"' | '\\\\' ) )+ DQUOTE + { + mDQUOTE(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:473:9: ( ESC_CHAR |~ ( '\\\"' | '\\\\' ) )+ + int cnt36=0; + loop36: + do { + int alt36=3; + int LA36_0 = input.LA(1); + + if ( (LA36_0=='\\') ) { + alt36=1; + } + else if ( ((LA36_0 >= '\u0000' && LA36_0 <= '!')||(LA36_0 >= '#' && LA36_0 <= '[')||(LA36_0 >= ']' && LA36_0 <= '\uFFFF')) ) { + alt36=2; + } + + + switch (alt36) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:473:10: ESC_CHAR + { + mESC_CHAR(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:473:19: ~ ( '\\\"' | '\\\\' ) + { + if ( (input.LA(1) >= '\u0000' && input.LA(1) <= '!')||(input.LA(1) >= '#' && input.LA(1) <= '[')||(input.LA(1) >= ']' && input.LA(1) <= '\uFFFF') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt36 >= 1 ) break loop36; + EarlyExitException eee = + new EarlyExitException(36, input); + throw eee; + } + cnt36++; + } while (true); + + + mDQUOTE(); + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "PHRASE_ANYTHING" + + public void mTokens() throws RecognitionException { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:8: ( LPAREN | RPAREN | LBRACK | RBRACK | COLON | PLUS | MINUS | STAR | QMARK | LCURLY | RCURLY | CARAT | TILDE | DQUOTE | SQUOTE | TO | AND | OR | NOT | WS | NUMBER | DATE_TOKEN | TERM_NORMAL | TERM_TRUNCATED | PHRASE | PHRASE_ANYTHING ) + int alt37=26; + alt37 = dfa37.predict(input); + switch (alt37) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:10: LPAREN + { + mLPAREN(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:17: RPAREN + { + mRPAREN(); + + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:24: LBRACK + { + mLBRACK(); + + + } + break; + case 4 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:31: RBRACK + { + mRBRACK(); + + + } + break; + case 5 : + // 
/dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:38: COLON + { + mCOLON(); + + + } + break; + case 6 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:44: PLUS + { + mPLUS(); + + + } + break; + case 7 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:49: MINUS + { + mMINUS(); + + + } + break; + case 8 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:55: STAR + { + mSTAR(); + + + } + break; + case 9 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:60: QMARK + { + mQMARK(); + + + } + break; + case 10 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:66: LCURLY + { + mLCURLY(); + + + } + break; + case 11 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:73: RCURLY + { + mRCURLY(); + + + } + break; + case 12 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:80: CARAT + { + mCARAT(); + + + } + break; + case 13 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:86: TILDE + { + mTILDE(); + + + } + break; + case 14 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:92: DQUOTE + { + mDQUOTE(); + + + } + break; + case 15 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:99: SQUOTE + { + mSQUOTE(); + + + } + break; + case 16 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:106: TO + { + mTO(); + + + } + break; + case 17 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:109: AND + { + mAND(); + + + } + break; + case 18 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:113: OR + { + mOR(); + + + } + break; + case 19 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:116: NOT + { + mNOT(); + + + } + break; + case 20 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:120: WS + { + mWS(); + + + } + break; + case 21 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:123: NUMBER + { + mNUMBER(); + + + } + break; + case 22 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:130: DATE_TOKEN + { + mDATE_TOKEN(); + + + } + break; + case 23 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:141: TERM_NORMAL + { + mTERM_NORMAL(); + + + } + break; + case 24 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:153: TERM_TRUNCATED + { + mTERM_TRUNCATED(); + + + } + break; + case 25 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:168: PHRASE + { + mPHRASE(); + + + } + break; + case 26 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/StandardLuceneGrammar.g:1:175: PHRASE_ANYTHING + { + mPHRASE_ANYTHING(); + + + } + break; + + } + + } + + + protected DFA34 dfa34 = new DFA34(this); + protected DFA26 dfa26 = new DFA26(this); + protected DFA30 dfa30 = new DFA30(this); + protected DFA37 dfa37 = new DFA37(this); + static final String DFA34_eotS = + "\4\uffff\1\10\1\uffff\2\10\2\uffff\1\10"; + static final String DFA34_eofS = + "\13\uffff"; + static final String DFA34_minS = + "\3\0\1\uffff\4\0\2\uffff\1\0"; + static final String DFA34_maxS = + 
"\3\uffff\1\uffff\4\uffff\2\uffff\1\uffff"; + static final String DFA34_acceptS = + "\3\uffff\1\2\4\uffff\1\3\1\1\1\uffff"; + static final String DFA34_specialS = + "\1\1\1\0\1\5\1\uffff\1\2\1\6\1\7\1\4\2\uffff\1\3}>"; + static final String[] DFA34_transitionS = { + "\11\3\2\uffff\2\3\1\uffff\22\3\3\uffff\4\3\3\uffff\1\1\1\uffff"+ + "\1\3\1\uffff\14\3\1\uffff\4\3\1\2\33\3\1\uffff\1\3\2\uffff\34"+ + "\3\1\uffff\1\3\2\uffff\u2f81\3\1\uffff\ucfff\3", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\4\uffff\1\7\1\4"+ + "\1\6\14\4\1\uffff\4\4\1\uffff\33\4\1\uffff\1\5\2\uffff\34\4"+ + "\1\uffff\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\4\uffff\1\7\1\4"+ + "\1\6\14\4\1\uffff\4\4\1\2\33\4\1\uffff\1\5\2\uffff\34\4\1\uffff"+ + "\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4", + "", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\3\uffff\1\11\1\7"+ + "\1\4\1\6\14\4\1\uffff\4\4\1\11\33\4\1\uffff\1\5\2\uffff\34\4"+ + "\1\uffff\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4", + "\0\12", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\3\uffff\1\11\1\7"+ + "\1\4\1\6\14\4\1\uffff\4\4\1\11\33\4\1\uffff\1\5\2\uffff\34\4"+ + "\1\uffff\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\3\uffff\1\11\1\7"+ + "\1\4\1\6\14\4\1\uffff\4\4\1\11\33\4\1\uffff\1\5\2\uffff\34\4"+ + "\1\uffff\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4", + "", + "", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\3\uffff\1\11\1\7"+ + "\1\4\1\6\14\4\1\uffff\4\4\1\11\33\4\1\uffff\1\5\2\uffff\34\4"+ + "\1\uffff\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4" + }; + + static final short[] DFA34_eot = DFA.unpackEncodedString(DFA34_eotS); + static final short[] DFA34_eof = DFA.unpackEncodedString(DFA34_eofS); + static final char[] DFA34_min = DFA.unpackEncodedStringToUnsignedChars(DFA34_minS); + static final char[] DFA34_max = DFA.unpackEncodedStringToUnsignedChars(DFA34_maxS); + static final short[] DFA34_accept = DFA.unpackEncodedString(DFA34_acceptS); + static final short[] DFA34_special = DFA.unpackEncodedString(DFA34_specialS); + static final short[][] DFA34_transition; + + static { + int numStates = DFA34_transitionS.length; + DFA34_transition = new short[numStates][]; + for (int i=0; i= '\u0000' && LA34_1 <= '\b')||(LA34_1 >= '\u000B' && LA34_1 <= '\f')||(LA34_1 >= '\u000E' && LA34_1 <= '\u001F')||(LA34_1 >= '#' && LA34_1 <= '&')||LA34_1==','||(LA34_1 >= '.' && LA34_1 <= '9')||(LA34_1 >= ';' && LA34_1 <= '>')||(LA34_1 >= '@' && LA34_1 <= 'Z')||(LA34_1 >= '_' && LA34_1 <= 'z')||LA34_1=='|'||(LA34_1 >= '\u007F' && LA34_1 <= '\u2FFF')||(LA34_1 >= '\u3001' && LA34_1 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA34_1=='\\') ) {s = 5;} + + else if ( (LA34_1=='-') ) {s = 6;} + + else if ( (LA34_1=='+') ) {s = 7;} + + if ( s>=0 ) return s; + break; + case 1 : + int LA34_0 = input.LA(1); + + s = -1; + if ( (LA34_0=='*') ) {s = 1;} + + else if ( (LA34_0=='?') ) {s = 2;} + + else if ( ((LA34_0 >= '\u0000' && LA34_0 <= '\b')||(LA34_0 >= '\u000B' && LA34_0 <= '\f')||(LA34_0 >= '\u000E' && LA34_0 <= '\u001F')||(LA34_0 >= '#' && LA34_0 <= '&')||LA34_0==','||(LA34_0 >= '.' 
&& LA34_0 <= '9')||(LA34_0 >= ';' && LA34_0 <= '>')||(LA34_0 >= '@' && LA34_0 <= 'Z')||LA34_0=='\\'||(LA34_0 >= '_' && LA34_0 <= 'z')||LA34_0=='|'||(LA34_0 >= '\u007F' && LA34_0 <= '\u2FFF')||(LA34_0 >= '\u3001' && LA34_0 <= '\uFFFF')) ) {s = 3;} + + if ( s>=0 ) return s; + break; + case 2 : + int LA34_4 = input.LA(1); + + s = -1; + if ( (LA34_4=='*'||LA34_4=='?') ) {s = 9;} + + else if ( ((LA34_4 >= '\u0000' && LA34_4 <= '\b')||(LA34_4 >= '\u000B' && LA34_4 <= '\f')||(LA34_4 >= '\u000E' && LA34_4 <= '\u001F')||(LA34_4 >= '#' && LA34_4 <= '&')||LA34_4==','||(LA34_4 >= '.' && LA34_4 <= '9')||(LA34_4 >= ';' && LA34_4 <= '>')||(LA34_4 >= '@' && LA34_4 <= 'Z')||(LA34_4 >= '_' && LA34_4 <= 'z')||LA34_4=='|'||(LA34_4 >= '\u007F' && LA34_4 <= '\u2FFF')||(LA34_4 >= '\u3001' && LA34_4 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA34_4=='\\') ) {s = 5;} + + else if ( (LA34_4=='-') ) {s = 6;} + + else if ( (LA34_4=='+') ) {s = 7;} + + else s = 8; + + if ( s>=0 ) return s; + break; + case 3 : + int LA34_10 = input.LA(1); + + s = -1; + if ( (LA34_10=='*'||LA34_10=='?') ) {s = 9;} + + else if ( ((LA34_10 >= '\u0000' && LA34_10 <= '\b')||(LA34_10 >= '\u000B' && LA34_10 <= '\f')||(LA34_10 >= '\u000E' && LA34_10 <= '\u001F')||(LA34_10 >= '#' && LA34_10 <= '&')||LA34_10==','||(LA34_10 >= '.' && LA34_10 <= '9')||(LA34_10 >= ';' && LA34_10 <= '>')||(LA34_10 >= '@' && LA34_10 <= 'Z')||(LA34_10 >= '_' && LA34_10 <= 'z')||LA34_10=='|'||(LA34_10 >= '\u007F' && LA34_10 <= '\u2FFF')||(LA34_10 >= '\u3001' && LA34_10 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA34_10=='\\') ) {s = 5;} + + else if ( (LA34_10=='-') ) {s = 6;} + + else if ( (LA34_10=='+') ) {s = 7;} + + else s = 8; + + if ( s>=0 ) return s; + break; + case 4 : + int LA34_7 = input.LA(1); + + s = -1; + if ( (LA34_7=='*'||LA34_7=='?') ) {s = 9;} + + else if ( ((LA34_7 >= '\u0000' && LA34_7 <= '\b')||(LA34_7 >= '\u000B' && LA34_7 <= '\f')||(LA34_7 >= '\u000E' && LA34_7 <= '\u001F')||(LA34_7 >= '#' && LA34_7 <= '&')||LA34_7==','||(LA34_7 >= '.' && LA34_7 <= '9')||(LA34_7 >= ';' && LA34_7 <= '>')||(LA34_7 >= '@' && LA34_7 <= 'Z')||(LA34_7 >= '_' && LA34_7 <= 'z')||LA34_7=='|'||(LA34_7 >= '\u007F' && LA34_7 <= '\u2FFF')||(LA34_7 >= '\u3001' && LA34_7 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA34_7=='\\') ) {s = 5;} + + else if ( (LA34_7=='-') ) {s = 6;} + + else if ( (LA34_7=='+') ) {s = 7;} + + else s = 8; + + if ( s>=0 ) return s; + break; + case 5 : + int LA34_2 = input.LA(1); + + s = -1; + if ( ((LA34_2 >= '\u0000' && LA34_2 <= '\b')||(LA34_2 >= '\u000B' && LA34_2 <= '\f')||(LA34_2 >= '\u000E' && LA34_2 <= '\u001F')||(LA34_2 >= '#' && LA34_2 <= '&')||LA34_2==','||(LA34_2 >= '.' && LA34_2 <= '9')||(LA34_2 >= ';' && LA34_2 <= '>')||(LA34_2 >= '@' && LA34_2 <= 'Z')||(LA34_2 >= '_' && LA34_2 <= 'z')||LA34_2=='|'||(LA34_2 >= '\u007F' && LA34_2 <= '\u2FFF')||(LA34_2 >= '\u3001' && LA34_2 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA34_2=='\\') ) {s = 5;} + + else if ( (LA34_2=='-') ) {s = 6;} + + else if ( (LA34_2=='+') ) {s = 7;} + + else if ( (LA34_2=='?') ) {s = 2;} + + if ( s>=0 ) return s; + break; + case 6 : + int LA34_5 = input.LA(1); + + s = -1; + if ( ((LA34_5 >= '\u0000' && LA34_5 <= '\uFFFF')) ) {s = 10;} + + if ( s>=0 ) return s; + break; + case 7 : + int LA34_6 = input.LA(1); + + s = -1; + if ( (LA34_6=='*'||LA34_6=='?') ) {s = 9;} + + else if ( ((LA34_6 >= '\u0000' && LA34_6 <= '\b')||(LA34_6 >= '\u000B' && LA34_6 <= '\f')||(LA34_6 >= '\u000E' && LA34_6 <= '\u001F')||(LA34_6 >= '#' && LA34_6 <= '&')||LA34_6==','||(LA34_6 >= '.' 
&& LA34_6 <= '9')||(LA34_6 >= ';' && LA34_6 <= '>')||(LA34_6 >= '@' && LA34_6 <= 'Z')||(LA34_6 >= '_' && LA34_6 <= 'z')||LA34_6=='|'||(LA34_6 >= '\u007F' && LA34_6 <= '\u2FFF')||(LA34_6 >= '\u3001' && LA34_6 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA34_6=='\\') ) {s = 5;} + + else if ( (LA34_6=='-') ) {s = 6;} + + else if ( (LA34_6=='+') ) {s = 7;} + + else s = 8; + + if ( s>=0 ) return s; + break; + } + NoViableAltException nvae = + new NoViableAltException(getDescription(), 34, _s, input); + error(nvae); + throw nvae; + } + + } + static final String DFA26_eotS = + "\2\5\1\uffff\2\5\2\uffff\1\5"; + static final String DFA26_eofS = + "\10\uffff"; + static final String DFA26_minS = + "\5\0\2\uffff\1\0"; + static final String DFA26_maxS = + "\5\uffff\2\uffff\1\uffff"; + static final String DFA26_acceptS = + "\5\uffff\1\2\1\1\1\uffff"; + static final String DFA26_specialS = + "\1\2\1\3\1\0\1\5\1\4\2\uffff\1\1}>"; + static final String[] DFA26_transitionS = { + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\4\uffff\1\4\1\1\1"+ + "\3\14\1\1\uffff\4\1\1\uffff\33\1\1\uffff\1\2\2\uffff\34\1\1"+ + "\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\0\7", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "", + "", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1" + }; + + static final short[] DFA26_eot = DFA.unpackEncodedString(DFA26_eotS); + static final short[] DFA26_eof = DFA.unpackEncodedString(DFA26_eofS); + static final char[] DFA26_min = DFA.unpackEncodedStringToUnsignedChars(DFA26_minS); + static final char[] DFA26_max = DFA.unpackEncodedStringToUnsignedChars(DFA26_maxS); + static final short[] DFA26_accept = DFA.unpackEncodedString(DFA26_acceptS); + static final short[] DFA26_special = DFA.unpackEncodedString(DFA26_specialS); + static final short[][] DFA26_transition; + + static { + int numStates = DFA26_transitionS.length; + DFA26_transition = new short[numStates][]; + for (int i=0; i= '\u0000' && LA26_2 <= '\uFFFF')) ) {s = 7;} + + if ( s>=0 ) return s; + break; + case 1 : + int LA26_7 = input.LA(1); + + s = -1; + if ( ((LA26_7 >= '\u0000' && LA26_7 <= '\b')||(LA26_7 >= '\u000B' && LA26_7 <= '\f')||(LA26_7 >= '\u000E' && LA26_7 <= '\u001F')||(LA26_7 >= '#' && LA26_7 <= '&')||LA26_7==','||(LA26_7 >= '.' && LA26_7 <= '9')||(LA26_7 >= ';' && LA26_7 <= '>')||(LA26_7 >= '@' && LA26_7 <= 'Z')||(LA26_7 >= '_' && LA26_7 <= 'z')||LA26_7=='|'||(LA26_7 >= '\u007F' && LA26_7 <= '\u2FFF')||(LA26_7 >= '\u3001' && LA26_7 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA26_7=='\\') ) {s = 2;} + + else if ( (LA26_7=='-') ) {s = 3;} + + else if ( (LA26_7=='+') ) {s = 4;} + + else if ( (LA26_7=='*'||LA26_7=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 2 : + int LA26_0 = input.LA(1); + + s = -1; + if ( ((LA26_0 >= '\u0000' && LA26_0 <= '\b')||(LA26_0 >= '\u000B' && LA26_0 <= '\f')||(LA26_0 >= '\u000E' && LA26_0 <= '\u001F')||(LA26_0 >= '#' && LA26_0 <= '&')||LA26_0==','||(LA26_0 >= '.' 
&& LA26_0 <= '9')||(LA26_0 >= ';' && LA26_0 <= '>')||(LA26_0 >= '@' && LA26_0 <= 'Z')||(LA26_0 >= '_' && LA26_0 <= 'z')||LA26_0=='|'||(LA26_0 >= '\u007F' && LA26_0 <= '\u2FFF')||(LA26_0 >= '\u3001' && LA26_0 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA26_0=='\\') ) {s = 2;} + + else if ( (LA26_0=='-') ) {s = 3;} + + else if ( (LA26_0=='+') ) {s = 4;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 3 : + int LA26_1 = input.LA(1); + + s = -1; + if ( ((LA26_1 >= '\u0000' && LA26_1 <= '\b')||(LA26_1 >= '\u000B' && LA26_1 <= '\f')||(LA26_1 >= '\u000E' && LA26_1 <= '\u001F')||(LA26_1 >= '#' && LA26_1 <= '&')||LA26_1==','||(LA26_1 >= '.' && LA26_1 <= '9')||(LA26_1 >= ';' && LA26_1 <= '>')||(LA26_1 >= '@' && LA26_1 <= 'Z')||(LA26_1 >= '_' && LA26_1 <= 'z')||LA26_1=='|'||(LA26_1 >= '\u007F' && LA26_1 <= '\u2FFF')||(LA26_1 >= '\u3001' && LA26_1 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA26_1=='\\') ) {s = 2;} + + else if ( (LA26_1=='-') ) {s = 3;} + + else if ( (LA26_1=='+') ) {s = 4;} + + else if ( (LA26_1=='*'||LA26_1=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 4 : + int LA26_4 = input.LA(1); + + s = -1; + if ( ((LA26_4 >= '\u0000' && LA26_4 <= '\b')||(LA26_4 >= '\u000B' && LA26_4 <= '\f')||(LA26_4 >= '\u000E' && LA26_4 <= '\u001F')||(LA26_4 >= '#' && LA26_4 <= '&')||LA26_4==','||(LA26_4 >= '.' && LA26_4 <= '9')||(LA26_4 >= ';' && LA26_4 <= '>')||(LA26_4 >= '@' && LA26_4 <= 'Z')||(LA26_4 >= '_' && LA26_4 <= 'z')||LA26_4=='|'||(LA26_4 >= '\u007F' && LA26_4 <= '\u2FFF')||(LA26_4 >= '\u3001' && LA26_4 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA26_4=='\\') ) {s = 2;} + + else if ( (LA26_4=='-') ) {s = 3;} + + else if ( (LA26_4=='+') ) {s = 4;} + + else if ( (LA26_4=='*'||LA26_4=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 5 : + int LA26_3 = input.LA(1); + + s = -1; + if ( ((LA26_3 >= '\u0000' && LA26_3 <= '\b')||(LA26_3 >= '\u000B' && LA26_3 <= '\f')||(LA26_3 >= '\u000E' && LA26_3 <= '\u001F')||(LA26_3 >= '#' && LA26_3 <= '&')||LA26_3==','||(LA26_3 >= '.' 
&& LA26_3 <= '9')||(LA26_3 >= ';' && LA26_3 <= '>')||(LA26_3 >= '@' && LA26_3 <= 'Z')||(LA26_3 >= '_' && LA26_3 <= 'z')||LA26_3=='|'||(LA26_3 >= '\u007F' && LA26_3 <= '\u2FFF')||(LA26_3 >= '\u3001' && LA26_3 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA26_3=='\\') ) {s = 2;} + + else if ( (LA26_3=='-') ) {s = 3;} + + else if ( (LA26_3=='+') ) {s = 4;} + + else if ( (LA26_3=='*'||LA26_3=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + } + NoViableAltException nvae = + new NoViableAltException(getDescription(), 26, _s, input); + error(nvae); + throw nvae; + } + + } + static final String DFA30_eotS = + "\2\5\1\uffff\2\5\2\uffff\1\5"; + static final String DFA30_eofS = + "\10\uffff"; + static final String DFA30_minS = + "\5\0\2\uffff\1\0"; + static final String DFA30_maxS = + "\5\uffff\2\uffff\1\uffff"; + static final String DFA30_acceptS = + "\5\uffff\1\2\1\1\1\uffff"; + static final String DFA30_specialS = + "\1\0\1\4\1\1\1\2\1\3\2\uffff\1\5}>"; + static final String[] DFA30_transitionS = { + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4\1"+ + "\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1\1"+ + "\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\0\7", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "", + "", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1" + }; + + static final short[] DFA30_eot = DFA.unpackEncodedString(DFA30_eotS); + static final short[] DFA30_eof = DFA.unpackEncodedString(DFA30_eofS); + static final char[] DFA30_min = DFA.unpackEncodedStringToUnsignedChars(DFA30_minS); + static final char[] DFA30_max = DFA.unpackEncodedStringToUnsignedChars(DFA30_maxS); + static final short[] DFA30_accept = DFA.unpackEncodedString(DFA30_acceptS); + static final short[] DFA30_special = DFA.unpackEncodedString(DFA30_specialS); + static final short[][] DFA30_transition; + + static { + int numStates = DFA30_transitionS.length; + DFA30_transition = new short[numStates][]; + for (int i=0; i= '\u0000' && LA30_0 <= '\b')||(LA30_0 >= '\u000B' && LA30_0 <= '\f')||(LA30_0 >= '\u000E' && LA30_0 <= '\u001F')||(LA30_0 >= '#' && LA30_0 <= '&')||LA30_0==','||(LA30_0 >= '.' 
&& LA30_0 <= '9')||(LA30_0 >= ';' && LA30_0 <= '>')||(LA30_0 >= '@' && LA30_0 <= 'Z')||(LA30_0 >= '_' && LA30_0 <= 'z')||LA30_0=='|'||(LA30_0 >= '\u007F' && LA30_0 <= '\u2FFF')||(LA30_0 >= '\u3001' && LA30_0 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA30_0=='\\') ) {s = 2;} + + else if ( (LA30_0=='-') ) {s = 3;} + + else if ( (LA30_0=='+') ) {s = 4;} + + else if ( (LA30_0=='*'||LA30_0=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 1 : + int LA30_2 = input.LA(1); + + s = -1; + if ( ((LA30_2 >= '\u0000' && LA30_2 <= '\uFFFF')) ) {s = 7;} + + if ( s>=0 ) return s; + break; + case 2 : + int LA30_3 = input.LA(1); + + s = -1; + if ( ((LA30_3 >= '\u0000' && LA30_3 <= '\b')||(LA30_3 >= '\u000B' && LA30_3 <= '\f')||(LA30_3 >= '\u000E' && LA30_3 <= '\u001F')||(LA30_3 >= '#' && LA30_3 <= '&')||LA30_3==','||(LA30_3 >= '.' && LA30_3 <= '9')||(LA30_3 >= ';' && LA30_3 <= '>')||(LA30_3 >= '@' && LA30_3 <= 'Z')||(LA30_3 >= '_' && LA30_3 <= 'z')||LA30_3=='|'||(LA30_3 >= '\u007F' && LA30_3 <= '\u2FFF')||(LA30_3 >= '\u3001' && LA30_3 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA30_3=='\\') ) {s = 2;} + + else if ( (LA30_3=='-') ) {s = 3;} + + else if ( (LA30_3=='+') ) {s = 4;} + + else if ( (LA30_3=='*'||LA30_3=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 3 : + int LA30_4 = input.LA(1); + + s = -1; + if ( ((LA30_4 >= '\u0000' && LA30_4 <= '\b')||(LA30_4 >= '\u000B' && LA30_4 <= '\f')||(LA30_4 >= '\u000E' && LA30_4 <= '\u001F')||(LA30_4 >= '#' && LA30_4 <= '&')||LA30_4==','||(LA30_4 >= '.' && LA30_4 <= '9')||(LA30_4 >= ';' && LA30_4 <= '>')||(LA30_4 >= '@' && LA30_4 <= 'Z')||(LA30_4 >= '_' && LA30_4 <= 'z')||LA30_4=='|'||(LA30_4 >= '\u007F' && LA30_4 <= '\u2FFF')||(LA30_4 >= '\u3001' && LA30_4 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA30_4=='\\') ) {s = 2;} + + else if ( (LA30_4=='-') ) {s = 3;} + + else if ( (LA30_4=='+') ) {s = 4;} + + else if ( (LA30_4=='*'||LA30_4=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 4 : + int LA30_1 = input.LA(1); + + s = -1; + if ( ((LA30_1 >= '\u0000' && LA30_1 <= '\b')||(LA30_1 >= '\u000B' && LA30_1 <= '\f')||(LA30_1 >= '\u000E' && LA30_1 <= '\u001F')||(LA30_1 >= '#' && LA30_1 <= '&')||LA30_1==','||(LA30_1 >= '.' && LA30_1 <= '9')||(LA30_1 >= ';' && LA30_1 <= '>')||(LA30_1 >= '@' && LA30_1 <= 'Z')||(LA30_1 >= '_' && LA30_1 <= 'z')||LA30_1=='|'||(LA30_1 >= '\u007F' && LA30_1 <= '\u2FFF')||(LA30_1 >= '\u3001' && LA30_1 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA30_1=='\\') ) {s = 2;} + + else if ( (LA30_1=='-') ) {s = 3;} + + else if ( (LA30_1=='+') ) {s = 4;} + + else if ( (LA30_1=='*'||LA30_1=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 5 : + int LA30_7 = input.LA(1); + + s = -1; + if ( ((LA30_7 >= '\u0000' && LA30_7 <= '\b')||(LA30_7 >= '\u000B' && LA30_7 <= '\f')||(LA30_7 >= '\u000E' && LA30_7 <= '\u001F')||(LA30_7 >= '#' && LA30_7 <= '&')||LA30_7==','||(LA30_7 >= '.' 
&& LA30_7 <= '9')||(LA30_7 >= ';' && LA30_7 <= '>')||(LA30_7 >= '@' && LA30_7 <= 'Z')||(LA30_7 >= '_' && LA30_7 <= 'z')||LA30_7=='|'||(LA30_7 >= '\u007F' && LA30_7 <= '\u2FFF')||(LA30_7 >= '\u3001' && LA30_7 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA30_7=='\\') ) {s = 2;} + + else if ( (LA30_7=='-') ) {s = 3;} + + else if ( (LA30_7=='+') ) {s = 4;} + + else if ( (LA30_7=='*'||LA30_7=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + } + NoViableAltException nvae = + new NoViableAltException(getDescription(), 30, _s, input); + error(nvae); + throw nvae; + } + + } + static final String DFA37_eotS = + "\10\uffff\1\32\1\34\4\uffff\1\35\1\uffff\2\42\1\50\1\42\1\53\1\42"+ + "\1\uffff\1\57\1\42\10\uffff\1\66\1\uffff\1\42\1\uffff\3\42\1\uffff"+ + "\1\50\1\53\1\uffff\1\53\2\42\1\uffff\1\57\3\42\3\uffff\1\42\1\50"+ + "\1\76\2\57\1\42\2\uffff\1\57\4\42\1\57\1\42\1\107\1\uffff\1\42\1"+ + "\107"; + static final String DFA37_eofS = + "\112\uffff"; + static final String DFA37_minS = + "\1\0\7\uffff\2\0\4\uffff\1\0\1\uffff\6\0\1\uffff\3\0\4\uffff\2\0"+ + "\1\uffff\1\0\1\uffff\5\0\1\uffff\2\0\1\uffff\3\0\1\uffff\5\0\2\uffff"+ + "\6\0\2\uffff\10\0\1\uffff\2\0"; + static final String DFA37_maxS = + "\1\uffff\7\uffff\2\uffff\4\uffff\1\uffff\1\uffff\6\uffff\1\uffff"+ + "\3\uffff\4\uffff\2\uffff\1\uffff\1\uffff\1\uffff\5\uffff\1\uffff"+ + "\2\uffff\1\uffff\3\uffff\1\uffff\5\uffff\2\uffff\6\uffff\2\uffff"+ + "\10\uffff\1\uffff\2\uffff"; + static final String DFA37_acceptS = + "\1\uffff\1\1\1\2\1\3\1\4\1\5\1\6\1\7\2\uffff\1\12\1\13\1\14\1\15"+ + "\1\uffff\1\17\6\uffff\1\24\3\uffff\1\10\1\30\1\11\1\16\2\uffff\1"+ + "\32\1\uffff\1\27\5\uffff\1\21\2\uffff\1\22\3\uffff\1\25\5\uffff"+ + "\1\31\1\20\6\uffff\1\31\1\23\10\uffff\1\26\2\uffff"; + static final String DFA37_specialS = + "\1\14\7\uffff\1\45\1\6\4\uffff\1\31\1\uffff\1\27\1\25\1\4\1\33\1"+ + "\11\1\40\1\uffff\1\12\1\5\1\35\4\uffff\1\32\1\17\1\uffff\1\20\1"+ + "\uffff\1\34\1\36\1\41\1\42\1\10\1\uffff\1\56\1\3\1\uffff\1\30\1"+ + "\50\1\51\1\uffff\1\7\1\23\1\2\1\55\1\13\2\uffff\1\44\1\21\1\43\1"+ + "\54\1\52\1\53\2\uffff\1\22\1\26\1\16\1\15\1\24\1\1\1\37\1\46\1\uffff"+ + "\1\47\1\0}>"; + static final String[] DFA37_transitionS = { + "\11\30\2\26\2\30\1\26\22\30\1\26\1\7\1\16\3\30\1\22\1\17\1\1"+ + "\1\2\1\10\1\6\1\30\1\7\2\30\12\27\1\5\4\30\1\11\1\30\1\21\14"+ + "\30\1\25\1\23\4\30\1\20\6\30\1\3\1\31\1\4\1\14\2\30\1\21\14"+ + "\30\1\25\1\23\13\30\1\12\1\24\1\13\1\15\u2f81\30\1\26\ucfff"+ + "\30", + "", + "", + "", + "", + "", + "", + "", + "\11\33\2\uffff\2\33\1\uffff\22\33\3\uffff\4\33\4\uffff\17\33"+ + "\1\uffff\4\33\1\uffff\33\33\1\uffff\1\33\2\uffff\34\33\1\uffff"+ + "\1\33\2\uffff\u2f81\33\1\uffff\ucfff\33", + "\11\33\2\uffff\2\33\1\uffff\22\33\3\uffff\4\33\4\uffff\17\33"+ + "\1\uffff\4\33\1\11\33\33\1\uffff\1\33\2\uffff\34\33\1\uffff"+ + "\1\33\2\uffff\u2f81\33\1\uffff\ucfff\33", + "", + "", + "", + "", + "\42\37\1\uffff\7\37\1\40\24\37\1\40\34\37\1\36\uffa3\37", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\17\43\1\41\13\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\16\43\1\47\14\43\1\uffff"+ + "\1\44\2\uffff\17\43\1\47\14\43\1\uffff\1\43\2\uffff\u2f81\43"+ + "\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\3\43\1\51\3\uffff"+ + "\1\33\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1"+ + 
"\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\22\43\1\52\10\43\1\uffff"+ + "\1\44\2\uffff\23\43\1\52\10\43\1\uffff\1\43\2\uffff\u2f81\43"+ + "\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\54\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\17\43\1\55\13\43\1\uffff"+ + "\1\44\2\uffff\20\43\1\55\13\43\1\uffff\1\43\2\uffff\u2f81\43"+ + "\1\uffff\ucfff\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\62\1\56\1\61\12\60\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\0\63", + "", + "", + "", + "", + "\0\64", + "\42\37\1\65\7\37\1\40\24\37\1\40\34\37\1\36\uffa3\37", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\0\67", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\4\43\1\70\26\43\1\uffff"+ + "\1\44\2\uffff\5\43\1\70\26\43\1\uffff\1\43\2\uffff\u2f81\43"+ + "\1\uffff\ucfff\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\24\43\1\71\6\43\1\uffff"+ + "\1\44\2\uffff\25\43\1\71\6\43\1\uffff\1\43\2\uffff\u2f81\43"+ + "\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\72\1\uffff\4\43\1\33\33\43\1\uffff\1"+ + "\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\62\1\56\1\61\12\73\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + 
"\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\74\1\uffff\4\43\1\33\33\43\1\uffff\1"+ + "\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\74\1\uffff\4\43\1\33\33\43\1\uffff\1"+ + "\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\42\37\1\65\7\37\1\40\24\37\1\40\34\37\1\36\uffa3\37", + "", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\101\2\100\12\77\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\1\102\1\43\12\73\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\101\2\100\12\103\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\101\2\100\12\104\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\105\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\105\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\104\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\101\2\100\12\43\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\104\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\106\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\110\1\uffff\4\43\1\33\33\43\1\uffff"+ + 
"\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\111\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43" + }; + + static final short[] DFA37_eot = DFA.unpackEncodedString(DFA37_eotS); + static final short[] DFA37_eof = DFA.unpackEncodedString(DFA37_eofS); + static final char[] DFA37_min = DFA.unpackEncodedStringToUnsignedChars(DFA37_minS); + static final char[] DFA37_max = DFA.unpackEncodedStringToUnsignedChars(DFA37_maxS); + static final short[] DFA37_accept = DFA.unpackEncodedString(DFA37_acceptS); + static final short[] DFA37_special = DFA.unpackEncodedString(DFA37_specialS); + static final short[][] DFA37_transition; + + static { + int numStates = DFA37_transitionS.length; + DFA37_transition = new short[numStates][]; + for (int i=0; i= '\u0000' && LA37_73 <= '\b')||(LA37_73 >= '\u000B' && LA37_73 <= '\f')||(LA37_73 >= '\u000E' && LA37_73 <= '\u001F')||(LA37_73 >= '#' && LA37_73 <= '&')||LA37_73==','||(LA37_73 >= '.' && LA37_73 <= '9')||(LA37_73 >= ';' && LA37_73 <= '>')||(LA37_73 >= '@' && LA37_73 <= 'Z')||(LA37_73 >= '_' && LA37_73 <= 'z')||LA37_73=='|'||(LA37_73 >= '\u007F' && LA37_73 <= '\u2FFF')||(LA37_73 >= '\u3001' && LA37_73 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_73=='\\') ) {s = 36;} + + else if ( (LA37_73=='-') ) {s = 37;} + + else if ( (LA37_73=='+') ) {s = 38;} + + else if ( (LA37_73=='*'||LA37_73=='?') ) {s = 27;} + + else s = 71; + + if ( s>=0 ) return s; + break; + case 1 : + int LA37_68 = input.LA(1); + + s = -1; + if ( ((LA37_68 >= '0' && LA37_68 <= '9')) ) {s = 68;} + + else if ( ((LA37_68 >= '\u0000' && LA37_68 <= '\b')||(LA37_68 >= '\u000B' && LA37_68 <= '\f')||(LA37_68 >= '\u000E' && LA37_68 <= '\u001F')||(LA37_68 >= '#' && LA37_68 <= '&')||LA37_68==','||(LA37_68 >= '.' && LA37_68 <= '/')||(LA37_68 >= ';' && LA37_68 <= '>')||(LA37_68 >= '@' && LA37_68 <= 'Z')||(LA37_68 >= '_' && LA37_68 <= 'z')||LA37_68=='|'||(LA37_68 >= '\u007F' && LA37_68 <= '\u2FFF')||(LA37_68 >= '\u3001' && LA37_68 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_68=='\\') ) {s = 36;} + + else if ( (LA37_68=='-') ) {s = 37;} + + else if ( (LA37_68=='+') ) {s = 38;} + + else if ( (LA37_68=='*'||LA37_68=='?') ) {s = 27;} + + else s = 47; + + if ( s>=0 ) return s; + break; + case 2 : + int LA37_50 = input.LA(1); + + s = -1; + if ( ((LA37_50 >= '0' && LA37_50 <= '9')) ) {s = 60;} + + else if ( ((LA37_50 >= '\u0000' && LA37_50 <= '\b')||(LA37_50 >= '\u000B' && LA37_50 <= '\f')||(LA37_50 >= '\u000E' && LA37_50 <= '\u001F')||(LA37_50 >= '#' && LA37_50 <= '&')||LA37_50==','||(LA37_50 >= '.' 
&& LA37_50 <= '/')||(LA37_50 >= ';' && LA37_50 <= '>')||(LA37_50 >= '@' && LA37_50 <= 'Z')||(LA37_50 >= '_' && LA37_50 <= 'z')||LA37_50=='|'||(LA37_50 >= '\u007F' && LA37_50 <= '\u2FFF')||(LA37_50 >= '\u3001' && LA37_50 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_50=='\\') ) {s = 36;} + + else if ( (LA37_50=='-') ) {s = 37;} + + else if ( (LA37_50=='+') ) {s = 38;} + + else if ( (LA37_50=='*'||LA37_50=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 3 : + int LA37_42 = input.LA(1); + + s = -1; + if ( ((LA37_42 >= '\u0000' && LA37_42 <= '\b')||(LA37_42 >= '\u000B' && LA37_42 <= '\f')||(LA37_42 >= '\u000E' && LA37_42 <= '\u001F')||(LA37_42 >= '#' && LA37_42 <= '&')||LA37_42==','||(LA37_42 >= '.' && LA37_42 <= '9')||(LA37_42 >= ';' && LA37_42 <= '>')||(LA37_42 >= '@' && LA37_42 <= 'Z')||(LA37_42 >= '_' && LA37_42 <= 'z')||LA37_42=='|'||(LA37_42 >= '\u007F' && LA37_42 <= '\u2FFF')||(LA37_42 >= '\u3001' && LA37_42 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_42=='\\') ) {s = 36;} + + else if ( (LA37_42=='-') ) {s = 37;} + + else if ( (LA37_42=='+') ) {s = 38;} + + else if ( (LA37_42=='*'||LA37_42=='?') ) {s = 27;} + + else s = 43; + + if ( s>=0 ) return s; + break; + case 4 : + int LA37_18 = input.LA(1); + + s = -1; + if ( (LA37_18=='&') ) {s = 41;} + + else if ( ((LA37_18 >= '\u0000' && LA37_18 <= '\b')||(LA37_18 >= '\u000B' && LA37_18 <= '\f')||(LA37_18 >= '\u000E' && LA37_18 <= '\u001F')||(LA37_18 >= '#' && LA37_18 <= '%')||LA37_18==','||(LA37_18 >= '.' && LA37_18 <= '9')||(LA37_18 >= ';' && LA37_18 <= '>')||(LA37_18 >= '@' && LA37_18 <= 'Z')||(LA37_18 >= '_' && LA37_18 <= 'z')||LA37_18=='|'||(LA37_18 >= '\u007F' && LA37_18 <= '\u2FFF')||(LA37_18 >= '\u3001' && LA37_18 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_18=='\\') ) {s = 36;} + + else if ( (LA37_18=='-') ) {s = 37;} + + else if ( (LA37_18=='+') ) {s = 38;} + + else if ( (LA37_18=='*'||LA37_18=='?') ) {s = 27;} + + else s = 40; + + if ( s>=0 ) return s; + break; + case 5 : + int LA37_24 = input.LA(1); + + s = -1; + if ( ((LA37_24 >= '\u0000' && LA37_24 <= '\b')||(LA37_24 >= '\u000B' && LA37_24 <= '\f')||(LA37_24 >= '\u000E' && LA37_24 <= '\u001F')||(LA37_24 >= '#' && LA37_24 <= '&')||LA37_24==','||(LA37_24 >= '.' 
&& LA37_24 <= '9')||(LA37_24 >= ';' && LA37_24 <= '>')||(LA37_24 >= '@' && LA37_24 <= 'Z')||(LA37_24 >= '_' && LA37_24 <= 'z')||LA37_24=='|'||(LA37_24 >= '\u007F' && LA37_24 <= '\u2FFF')||(LA37_24 >= '\u3001' && LA37_24 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_24=='\\') ) {s = 36;} + + else if ( (LA37_24=='-') ) {s = 37;} + + else if ( (LA37_24=='+') ) {s = 38;} + + else if ( (LA37_24=='*'||LA37_24=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 6 : + int LA37_9 = input.LA(1); + + s = -1; + if ( (LA37_9=='?') ) {s = 9;} + + else if ( ((LA37_9 >= '\u0000' && LA37_9 <= '\b')||(LA37_9 >= '\u000B' && LA37_9 <= '\f')||(LA37_9 >= '\u000E' && LA37_9 <= '\u001F')||(LA37_9 >= '#' && LA37_9 <= '&')||(LA37_9 >= '+' && LA37_9 <= '9')||(LA37_9 >= ';' && LA37_9 <= '>')||(LA37_9 >= '@' && LA37_9 <= 'Z')||LA37_9=='\\'||(LA37_9 >= '_' && LA37_9 <= 'z')||LA37_9=='|'||(LA37_9 >= '\u007F' && LA37_9 <= '\u2FFF')||(LA37_9 >= '\u3001' && LA37_9 <= '\uFFFF')) ) {s = 27;} + + else s = 28; + + if ( s>=0 ) return s; + break; + case 7 : + int LA37_48 = input.LA(1); + + s = -1; + if ( (LA37_48=='.') ) {s = 46;} + + else if ( ((LA37_48 >= '0' && LA37_48 <= '9')) ) {s = 59;} + + else if ( (LA37_48=='/') ) {s = 49;} + + else if ( (LA37_48=='-') ) {s = 50;} + + else if ( ((LA37_48 >= '\u0000' && LA37_48 <= '\b')||(LA37_48 >= '\u000B' && LA37_48 <= '\f')||(LA37_48 >= '\u000E' && LA37_48 <= '\u001F')||(LA37_48 >= '#' && LA37_48 <= '&')||LA37_48==','||(LA37_48 >= ';' && LA37_48 <= '>')||(LA37_48 >= '@' && LA37_48 <= 'Z')||(LA37_48 >= '_' && LA37_48 <= 'z')||LA37_48=='|'||(LA37_48 >= '\u007F' && LA37_48 <= '\u2FFF')||(LA37_48 >= '\u3001' && LA37_48 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_48=='\\') ) {s = 36;} + + else if ( (LA37_48=='+') ) {s = 38;} + + else if ( (LA37_48=='*'||LA37_48=='?') ) {s = 27;} + + else s = 47; + + if ( s>=0 ) return s; + break; + case 8 : + int LA37_39 = input.LA(1); + + s = -1; + if ( (LA37_39=='D'||LA37_39=='d') ) {s = 56;} + + else if ( ((LA37_39 >= '\u0000' && LA37_39 <= '\b')||(LA37_39 >= '\u000B' && LA37_39 <= '\f')||(LA37_39 >= '\u000E' && LA37_39 <= '\u001F')||(LA37_39 >= '#' && LA37_39 <= '&')||LA37_39==','||(LA37_39 >= '.' && LA37_39 <= '9')||(LA37_39 >= ';' && LA37_39 <= '>')||(LA37_39 >= '@' && LA37_39 <= 'C')||(LA37_39 >= 'E' && LA37_39 <= 'Z')||(LA37_39 >= '_' && LA37_39 <= 'c')||(LA37_39 >= 'e' && LA37_39 <= 'z')||LA37_39=='|'||(LA37_39 >= '\u007F' && LA37_39 <= '\u2FFF')||(LA37_39 >= '\u3001' && LA37_39 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_39=='\\') ) {s = 36;} + + else if ( (LA37_39=='-') ) {s = 37;} + + else if ( (LA37_39=='+') ) {s = 38;} + + else if ( (LA37_39=='*'||LA37_39=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 9 : + int LA37_20 = input.LA(1); + + s = -1; + if ( (LA37_20=='|') ) {s = 44;} + + else if ( ((LA37_20 >= '\u0000' && LA37_20 <= '\b')||(LA37_20 >= '\u000B' && LA37_20 <= '\f')||(LA37_20 >= '\u000E' && LA37_20 <= '\u001F')||(LA37_20 >= '#' && LA37_20 <= '&')||LA37_20==','||(LA37_20 >= '.' 
&& LA37_20 <= '9')||(LA37_20 >= ';' && LA37_20 <= '>')||(LA37_20 >= '@' && LA37_20 <= 'Z')||(LA37_20 >= '_' && LA37_20 <= 'z')||(LA37_20 >= '\u007F' && LA37_20 <= '\u2FFF')||(LA37_20 >= '\u3001' && LA37_20 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_20=='\\') ) {s = 36;} + + else if ( (LA37_20=='-') ) {s = 37;} + + else if ( (LA37_20=='+') ) {s = 38;} + + else if ( (LA37_20=='*'||LA37_20=='?') ) {s = 27;} + + else s = 43; + + if ( s>=0 ) return s; + break; + case 10 : + int LA37_23 = input.LA(1); + + s = -1; + if ( (LA37_23=='.') ) {s = 46;} + + else if ( ((LA37_23 >= '0' && LA37_23 <= '9')) ) {s = 48;} + + else if ( (LA37_23=='/') ) {s = 49;} + + else if ( (LA37_23=='-') ) {s = 50;} + + else if ( ((LA37_23 >= '\u0000' && LA37_23 <= '\b')||(LA37_23 >= '\u000B' && LA37_23 <= '\f')||(LA37_23 >= '\u000E' && LA37_23 <= '\u001F')||(LA37_23 >= '#' && LA37_23 <= '&')||LA37_23==','||(LA37_23 >= ';' && LA37_23 <= '>')||(LA37_23 >= '@' && LA37_23 <= 'Z')||(LA37_23 >= '_' && LA37_23 <= 'z')||LA37_23=='|'||(LA37_23 >= '\u007F' && LA37_23 <= '\u2FFF')||(LA37_23 >= '\u3001' && LA37_23 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_23=='\\') ) {s = 36;} + + else if ( (LA37_23=='+') ) {s = 38;} + + else if ( (LA37_23=='*'||LA37_23=='?') ) {s = 27;} + + else s = 47; + + if ( s>=0 ) return s; + break; + case 11 : + int LA37_52 = input.LA(1); + + s = -1; + if ( (LA37_52=='\"') ) {s = 53;} + + else if ( (LA37_52=='\\') ) {s = 30;} + + else if ( ((LA37_52 >= '\u0000' && LA37_52 <= '!')||(LA37_52 >= '#' && LA37_52 <= ')')||(LA37_52 >= '+' && LA37_52 <= '>')||(LA37_52 >= '@' && LA37_52 <= '[')||(LA37_52 >= ']' && LA37_52 <= '\uFFFF')) ) {s = 31;} + + else if ( (LA37_52=='*'||LA37_52=='?') ) {s = 32;} + + if ( s>=0 ) return s; + break; + case 12 : + int LA37_0 = input.LA(1); + + s = -1; + if ( (LA37_0=='(') ) {s = 1;} + + else if ( (LA37_0==')') ) {s = 2;} + + else if ( (LA37_0=='[') ) {s = 3;} + + else if ( (LA37_0==']') ) {s = 4;} + + else if ( (LA37_0==':') ) {s = 5;} + + else if ( (LA37_0=='+') ) {s = 6;} + + else if ( (LA37_0=='!'||LA37_0=='-') ) {s = 7;} + + else if ( (LA37_0=='*') ) {s = 8;} + + else if ( (LA37_0=='?') ) {s = 9;} + + else if ( (LA37_0=='{') ) {s = 10;} + + else if ( (LA37_0=='}') ) {s = 11;} + + else if ( (LA37_0=='^') ) {s = 12;} + + else if ( (LA37_0=='~') ) {s = 13;} + + else if ( (LA37_0=='\"') ) {s = 14;} + + else if ( (LA37_0=='\'') ) {s = 15;} + + else if ( (LA37_0=='T') ) {s = 16;} + + else if ( (LA37_0=='A'||LA37_0=='a') ) {s = 17;} + + else if ( (LA37_0=='&') ) {s = 18;} + + else if ( (LA37_0=='O'||LA37_0=='o') ) {s = 19;} + + else if ( (LA37_0=='|') ) {s = 20;} + + else if ( (LA37_0=='N'||LA37_0=='n') ) {s = 21;} + + else if ( ((LA37_0 >= '\t' && LA37_0 <= '\n')||LA37_0=='\r'||LA37_0==' '||LA37_0=='\u3000') ) {s = 22;} + + else if ( ((LA37_0 >= '0' && LA37_0 <= '9')) ) {s = 23;} + + else if ( ((LA37_0 >= '\u0000' && LA37_0 <= '\b')||(LA37_0 >= '\u000B' && LA37_0 <= '\f')||(LA37_0 >= '\u000E' && LA37_0 <= '\u001F')||(LA37_0 >= '#' && LA37_0 <= '%')||LA37_0==','||(LA37_0 >= '.' 
&& LA37_0 <= '/')||(LA37_0 >= ';' && LA37_0 <= '>')||LA37_0=='@'||(LA37_0 >= 'B' && LA37_0 <= 'M')||(LA37_0 >= 'P' && LA37_0 <= 'S')||(LA37_0 >= 'U' && LA37_0 <= 'Z')||(LA37_0 >= '_' && LA37_0 <= '`')||(LA37_0 >= 'b' && LA37_0 <= 'm')||(LA37_0 >= 'p' && LA37_0 <= 'z')||(LA37_0 >= '\u007F' && LA37_0 <= '\u2FFF')||(LA37_0 >= '\u3001' && LA37_0 <= '\uFFFF')) ) {s = 24;} + + else if ( (LA37_0=='\\') ) {s = 25;} + + if ( s>=0 ) return s; + break; + case 13 : + int LA37_66 = input.LA(1); + + s = -1; + if ( ((LA37_66 >= '0' && LA37_66 <= '9')) ) {s = 68;} + + else if ( ((LA37_66 >= '\u0000' && LA37_66 <= '\b')||(LA37_66 >= '\u000B' && LA37_66 <= '\f')||(LA37_66 >= '\u000E' && LA37_66 <= '\u001F')||(LA37_66 >= '#' && LA37_66 <= '&')||LA37_66==','||(LA37_66 >= '.' && LA37_66 <= '/')||(LA37_66 >= ';' && LA37_66 <= '>')||(LA37_66 >= '@' && LA37_66 <= 'Z')||(LA37_66 >= '_' && LA37_66 <= 'z')||LA37_66=='|'||(LA37_66 >= '\u007F' && LA37_66 <= '\u2FFF')||(LA37_66 >= '\u3001' && LA37_66 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_66=='\\') ) {s = 36;} + + else if ( (LA37_66=='-') ) {s = 37;} + + else if ( (LA37_66=='+') ) {s = 38;} + + else if ( (LA37_66=='*'||LA37_66=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 14 : + int LA37_65 = input.LA(1); + + s = -1; + if ( ((LA37_65 >= '0' && LA37_65 <= '9')) ) {s = 69;} + + else if ( ((LA37_65 >= '\u0000' && LA37_65 <= '\b')||(LA37_65 >= '\u000B' && LA37_65 <= '\f')||(LA37_65 >= '\u000E' && LA37_65 <= '\u001F')||(LA37_65 >= '#' && LA37_65 <= '&')||LA37_65==','||(LA37_65 >= '.' && LA37_65 <= '/')||(LA37_65 >= ';' && LA37_65 <= '>')||(LA37_65 >= '@' && LA37_65 <= 'Z')||(LA37_65 >= '_' && LA37_65 <= 'z')||LA37_65=='|'||(LA37_65 >= '\u007F' && LA37_65 <= '\u2FFF')||(LA37_65 >= '\u3001' && LA37_65 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_65=='\\') ) {s = 36;} + + else if ( (LA37_65=='-') ) {s = 37;} + + else if ( (LA37_65=='+') ) {s = 38;} + + else if ( (LA37_65=='*'||LA37_65=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 15 : + int LA37_31 = input.LA(1); + + s = -1; + if ( (LA37_31=='\"') ) {s = 53;} + + else if ( (LA37_31=='\\') ) {s = 30;} + + else if ( ((LA37_31 >= '\u0000' && LA37_31 <= '!')||(LA37_31 >= '#' && LA37_31 <= ')')||(LA37_31 >= '+' && LA37_31 <= '>')||(LA37_31 >= '@' && LA37_31 <= '[')||(LA37_31 >= ']' && LA37_31 <= '\uFFFF')) ) {s = 31;} + + else if ( (LA37_31=='*'||LA37_31=='?') ) {s = 32;} + + if ( s>=0 ) return s; + break; + case 16 : + int LA37_33 = input.LA(1); + + s = -1; + if ( ((LA37_33 >= '\u0000' && LA37_33 <= '\b')||(LA37_33 >= '\u000B' && LA37_33 <= '\f')||(LA37_33 >= '\u000E' && LA37_33 <= '\u001F')||(LA37_33 >= '#' && LA37_33 <= '&')||LA37_33==','||(LA37_33 >= '.' && LA37_33 <= '9')||(LA37_33 >= ';' && LA37_33 <= '>')||(LA37_33 >= '@' && LA37_33 <= 'Z')||(LA37_33 >= '_' && LA37_33 <= 'z')||LA37_33=='|'||(LA37_33 >= '\u007F' && LA37_33 <= '\u2FFF')||(LA37_33 >= '\u3001' && LA37_33 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_33=='\\') ) {s = 36;} + + else if ( (LA37_33=='-') ) {s = 37;} + + else if ( (LA37_33=='+') ) {s = 38;} + + else if ( (LA37_33=='*'||LA37_33=='?') ) {s = 27;} + + else s = 54; + + if ( s>=0 ) return s; + break; + case 17 : + int LA37_56 = input.LA(1); + + s = -1; + if ( ((LA37_56 >= '\u0000' && LA37_56 <= '\b')||(LA37_56 >= '\u000B' && LA37_56 <= '\f')||(LA37_56 >= '\u000E' && LA37_56 <= '\u001F')||(LA37_56 >= '#' && LA37_56 <= '&')||LA37_56==','||(LA37_56 >= '.' 
&& LA37_56 <= '9')||(LA37_56 >= ';' && LA37_56 <= '>')||(LA37_56 >= '@' && LA37_56 <= 'Z')||(LA37_56 >= '_' && LA37_56 <= 'z')||LA37_56=='|'||(LA37_56 >= '\u007F' && LA37_56 <= '\u2FFF')||(LA37_56 >= '\u3001' && LA37_56 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_56=='\\') ) {s = 36;} + + else if ( (LA37_56=='-') ) {s = 37;} + + else if ( (LA37_56=='+') ) {s = 38;} + + else if ( (LA37_56=='*'||LA37_56=='?') ) {s = 27;} + + else s = 40; + + if ( s>=0 ) return s; + break; + case 18 : + int LA37_63 = input.LA(1); + + s = -1; + if ( ((LA37_63 >= '0' && LA37_63 <= '9')) ) {s = 68;} + + else if ( ((LA37_63 >= '.' && LA37_63 <= '/')) ) {s = 64;} + + else if ( (LA37_63=='-') ) {s = 65;} + + else if ( ((LA37_63 >= '\u0000' && LA37_63 <= '\b')||(LA37_63 >= '\u000B' && LA37_63 <= '\f')||(LA37_63 >= '\u000E' && LA37_63 <= '\u001F')||(LA37_63 >= '#' && LA37_63 <= '&')||LA37_63==','||(LA37_63 >= ';' && LA37_63 <= '>')||(LA37_63 >= '@' && LA37_63 <= 'Z')||(LA37_63 >= '_' && LA37_63 <= 'z')||LA37_63=='|'||(LA37_63 >= '\u007F' && LA37_63 <= '\u2FFF')||(LA37_63 >= '\u3001' && LA37_63 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_63=='\\') ) {s = 36;} + + else if ( (LA37_63=='+') ) {s = 38;} + + else if ( (LA37_63=='*'||LA37_63=='?') ) {s = 27;} + + else s = 47; + + if ( s>=0 ) return s; + break; + case 19 : + int LA37_49 = input.LA(1); + + s = -1; + if ( ((LA37_49 >= '0' && LA37_49 <= '9')) ) {s = 60;} + + else if ( ((LA37_49 >= '\u0000' && LA37_49 <= '\b')||(LA37_49 >= '\u000B' && LA37_49 <= '\f')||(LA37_49 >= '\u000E' && LA37_49 <= '\u001F')||(LA37_49 >= '#' && LA37_49 <= '&')||LA37_49==','||(LA37_49 >= '.' && LA37_49 <= '/')||(LA37_49 >= ';' && LA37_49 <= '>')||(LA37_49 >= '@' && LA37_49 <= 'Z')||(LA37_49 >= '_' && LA37_49 <= 'z')||LA37_49=='|'||(LA37_49 >= '\u007F' && LA37_49 <= '\u2FFF')||(LA37_49 >= '\u3001' && LA37_49 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_49=='\\') ) {s = 36;} + + else if ( (LA37_49=='-') ) {s = 37;} + + else if ( (LA37_49=='+') ) {s = 38;} + + else if ( (LA37_49=='*'||LA37_49=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 20 : + int LA37_67 = input.LA(1); + + s = -1; + if ( ((LA37_67 >= '.' && LA37_67 <= '/')) ) {s = 64;} + + else if ( (LA37_67=='-') ) {s = 65;} + + else if ( ((LA37_67 >= '\u0000' && LA37_67 <= '\b')||(LA37_67 >= '\u000B' && LA37_67 <= '\f')||(LA37_67 >= '\u000E' && LA37_67 <= '\u001F')||(LA37_67 >= '#' && LA37_67 <= '&')||LA37_67==','||(LA37_67 >= '0' && LA37_67 <= '9')||(LA37_67 >= ';' && LA37_67 <= '>')||(LA37_67 >= '@' && LA37_67 <= 'Z')||(LA37_67 >= '_' && LA37_67 <= 'z')||LA37_67=='|'||(LA37_67 >= '\u007F' && LA37_67 <= '\u2FFF')||(LA37_67 >= '\u3001' && LA37_67 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_67=='\\') ) {s = 36;} + + else if ( (LA37_67=='+') ) {s = 38;} + + else if ( (LA37_67=='*'||LA37_67=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 21 : + int LA37_17 = input.LA(1); + + s = -1; + if ( (LA37_17=='N'||LA37_17=='n') ) {s = 39;} + + else if ( ((LA37_17 >= '\u0000' && LA37_17 <= '\b')||(LA37_17 >= '\u000B' && LA37_17 <= '\f')||(LA37_17 >= '\u000E' && LA37_17 <= '\u001F')||(LA37_17 >= '#' && LA37_17 <= '&')||LA37_17==','||(LA37_17 >= '.' 
&& LA37_17 <= '9')||(LA37_17 >= ';' && LA37_17 <= '>')||(LA37_17 >= '@' && LA37_17 <= 'M')||(LA37_17 >= 'O' && LA37_17 <= 'Z')||(LA37_17 >= '_' && LA37_17 <= 'm')||(LA37_17 >= 'o' && LA37_17 <= 'z')||LA37_17=='|'||(LA37_17 >= '\u007F' && LA37_17 <= '\u2FFF')||(LA37_17 >= '\u3001' && LA37_17 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_17=='\\') ) {s = 36;} + + else if ( (LA37_17=='-') ) {s = 37;} + + else if ( (LA37_17=='+') ) {s = 38;} + + else if ( (LA37_17=='*'||LA37_17=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 22 : + int LA37_64 = input.LA(1); + + s = -1; + if ( ((LA37_64 >= '0' && LA37_64 <= '9')) ) {s = 69;} + + else if ( ((LA37_64 >= '\u0000' && LA37_64 <= '\b')||(LA37_64 >= '\u000B' && LA37_64 <= '\f')||(LA37_64 >= '\u000E' && LA37_64 <= '\u001F')||(LA37_64 >= '#' && LA37_64 <= '&')||LA37_64==','||(LA37_64 >= '.' && LA37_64 <= '/')||(LA37_64 >= ';' && LA37_64 <= '>')||(LA37_64 >= '@' && LA37_64 <= 'Z')||(LA37_64 >= '_' && LA37_64 <= 'z')||LA37_64=='|'||(LA37_64 >= '\u007F' && LA37_64 <= '\u2FFF')||(LA37_64 >= '\u3001' && LA37_64 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_64=='\\') ) {s = 36;} + + else if ( (LA37_64=='-') ) {s = 37;} + + else if ( (LA37_64=='+') ) {s = 38;} + + else if ( (LA37_64=='*'||LA37_64=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 23 : + int LA37_16 = input.LA(1); + + s = -1; + if ( (LA37_16=='O') ) {s = 33;} + + else if ( ((LA37_16 >= '\u0000' && LA37_16 <= '\b')||(LA37_16 >= '\u000B' && LA37_16 <= '\f')||(LA37_16 >= '\u000E' && LA37_16 <= '\u001F')||(LA37_16 >= '#' && LA37_16 <= '&')||LA37_16==','||(LA37_16 >= '.' && LA37_16 <= '9')||(LA37_16 >= ';' && LA37_16 <= '>')||(LA37_16 >= '@' && LA37_16 <= 'N')||(LA37_16 >= 'P' && LA37_16 <= 'Z')||(LA37_16 >= '_' && LA37_16 <= 'z')||LA37_16=='|'||(LA37_16 >= '\u007F' && LA37_16 <= '\u2FFF')||(LA37_16 >= '\u3001' && LA37_16 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_16=='\\') ) {s = 36;} + + else if ( (LA37_16=='-') ) {s = 37;} + + else if ( (LA37_16=='+') ) {s = 38;} + + else if ( (LA37_16=='*'||LA37_16=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 24 : + int LA37_44 = input.LA(1); + + s = -1; + if ( ((LA37_44 >= '\u0000' && LA37_44 <= '\b')||(LA37_44 >= '\u000B' && LA37_44 <= '\f')||(LA37_44 >= '\u000E' && LA37_44 <= '\u001F')||(LA37_44 >= '#' && LA37_44 <= '&')||LA37_44==','||(LA37_44 >= '.' 
&& LA37_44 <= '9')||(LA37_44 >= ';' && LA37_44 <= '>')||(LA37_44 >= '@' && LA37_44 <= 'Z')||(LA37_44 >= '_' && LA37_44 <= 'z')||LA37_44=='|'||(LA37_44 >= '\u007F' && LA37_44 <= '\u2FFF')||(LA37_44 >= '\u3001' && LA37_44 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_44=='\\') ) {s = 36;} + + else if ( (LA37_44=='-') ) {s = 37;} + + else if ( (LA37_44=='+') ) {s = 38;} + + else if ( (LA37_44=='*'||LA37_44=='?') ) {s = 27;} + + else s = 43; + + if ( s>=0 ) return s; + break; + case 25 : + int LA37_14 = input.LA(1); + + s = -1; + if ( (LA37_14=='\\') ) {s = 30;} + + else if ( ((LA37_14 >= '\u0000' && LA37_14 <= '!')||(LA37_14 >= '#' && LA37_14 <= ')')||(LA37_14 >= '+' && LA37_14 <= '>')||(LA37_14 >= '@' && LA37_14 <= '[')||(LA37_14 >= ']' && LA37_14 <= '\uFFFF')) ) {s = 31;} + + else if ( (LA37_14=='*'||LA37_14=='?') ) {s = 32;} + + else s = 29; + + if ( s>=0 ) return s; + break; + case 26 : + int LA37_30 = input.LA(1); + + s = -1; + if ( ((LA37_30 >= '\u0000' && LA37_30 <= '\uFFFF')) ) {s = 52;} + + if ( s>=0 ) return s; + break; + case 27 : + int LA37_19 = input.LA(1); + + s = -1; + if ( (LA37_19=='R'||LA37_19=='r') ) {s = 42;} + + else if ( ((LA37_19 >= '\u0000' && LA37_19 <= '\b')||(LA37_19 >= '\u000B' && LA37_19 <= '\f')||(LA37_19 >= '\u000E' && LA37_19 <= '\u001F')||(LA37_19 >= '#' && LA37_19 <= '&')||LA37_19==','||(LA37_19 >= '.' && LA37_19 <= '9')||(LA37_19 >= ';' && LA37_19 <= '>')||(LA37_19 >= '@' && LA37_19 <= 'Q')||(LA37_19 >= 'S' && LA37_19 <= 'Z')||(LA37_19 >= '_' && LA37_19 <= 'q')||(LA37_19 >= 's' && LA37_19 <= 'z')||LA37_19=='|'||(LA37_19 >= '\u007F' && LA37_19 <= '\u2FFF')||(LA37_19 >= '\u3001' && LA37_19 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_19=='\\') ) {s = 36;} + + else if ( (LA37_19=='-') ) {s = 37;} + + else if ( (LA37_19=='+') ) {s = 38;} + + else if ( (LA37_19=='*'||LA37_19=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 28 : + int LA37_35 = input.LA(1); + + s = -1; + if ( ((LA37_35 >= '\u0000' && LA37_35 <= '\b')||(LA37_35 >= '\u000B' && LA37_35 <= '\f')||(LA37_35 >= '\u000E' && LA37_35 <= '\u001F')||(LA37_35 >= '#' && LA37_35 <= '&')||LA37_35==','||(LA37_35 >= '.' && LA37_35 <= '9')||(LA37_35 >= ';' && LA37_35 <= '>')||(LA37_35 >= '@' && LA37_35 <= 'Z')||(LA37_35 >= '_' && LA37_35 <= 'z')||LA37_35=='|'||(LA37_35 >= '\u007F' && LA37_35 <= '\u2FFF')||(LA37_35 >= '\u3001' && LA37_35 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_35=='\\') ) {s = 36;} + + else if ( (LA37_35=='-') ) {s = 37;} + + else if ( (LA37_35=='+') ) {s = 38;} + + else if ( (LA37_35=='*'||LA37_35=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 29 : + int LA37_25 = input.LA(1); + + s = -1; + if ( ((LA37_25 >= '\u0000' && LA37_25 <= '\uFFFF')) ) {s = 51;} + + if ( s>=0 ) return s; + break; + case 30 : + int LA37_36 = input.LA(1); + + s = -1; + if ( ((LA37_36 >= '\u0000' && LA37_36 <= '\uFFFF')) ) {s = 55;} + + if ( s>=0 ) return s; + break; + case 31 : + int LA37_69 = input.LA(1); + + s = -1; + if ( ((LA37_69 >= '0' && LA37_69 <= '9')) ) {s = 70;} + + else if ( ((LA37_69 >= '\u0000' && LA37_69 <= '\b')||(LA37_69 >= '\u000B' && LA37_69 <= '\f')||(LA37_69 >= '\u000E' && LA37_69 <= '\u001F')||(LA37_69 >= '#' && LA37_69 <= '&')||LA37_69==','||(LA37_69 >= '.' 
&& LA37_69 <= '/')||(LA37_69 >= ';' && LA37_69 <= '>')||(LA37_69 >= '@' && LA37_69 <= 'Z')||(LA37_69 >= '_' && LA37_69 <= 'z')||LA37_69=='|'||(LA37_69 >= '\u007F' && LA37_69 <= '\u2FFF')||(LA37_69 >= '\u3001' && LA37_69 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_69=='\\') ) {s = 36;} + + else if ( (LA37_69=='-') ) {s = 37;} + + else if ( (LA37_69=='+') ) {s = 38;} + + else if ( (LA37_69=='*'||LA37_69=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 32 : + int LA37_21 = input.LA(1); + + s = -1; + if ( (LA37_21=='O'||LA37_21=='o') ) {s = 45;} + + else if ( ((LA37_21 >= '\u0000' && LA37_21 <= '\b')||(LA37_21 >= '\u000B' && LA37_21 <= '\f')||(LA37_21 >= '\u000E' && LA37_21 <= '\u001F')||(LA37_21 >= '#' && LA37_21 <= '&')||LA37_21==','||(LA37_21 >= '.' && LA37_21 <= '9')||(LA37_21 >= ';' && LA37_21 <= '>')||(LA37_21 >= '@' && LA37_21 <= 'N')||(LA37_21 >= 'P' && LA37_21 <= 'Z')||(LA37_21 >= '_' && LA37_21 <= 'n')||(LA37_21 >= 'p' && LA37_21 <= 'z')||LA37_21=='|'||(LA37_21 >= '\u007F' && LA37_21 <= '\u2FFF')||(LA37_21 >= '\u3001' && LA37_21 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_21=='\\') ) {s = 36;} + + else if ( (LA37_21=='-') ) {s = 37;} + + else if ( (LA37_21=='+') ) {s = 38;} + + else if ( (LA37_21=='*'||LA37_21=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 33 : + int LA37_37 = input.LA(1); + + s = -1; + if ( ((LA37_37 >= '\u0000' && LA37_37 <= '\b')||(LA37_37 >= '\u000B' && LA37_37 <= '\f')||(LA37_37 >= '\u000E' && LA37_37 <= '\u001F')||(LA37_37 >= '#' && LA37_37 <= '&')||LA37_37==','||(LA37_37 >= '.' && LA37_37 <= '9')||(LA37_37 >= ';' && LA37_37 <= '>')||(LA37_37 >= '@' && LA37_37 <= 'Z')||(LA37_37 >= '_' && LA37_37 <= 'z')||LA37_37=='|'||(LA37_37 >= '\u007F' && LA37_37 <= '\u2FFF')||(LA37_37 >= '\u3001' && LA37_37 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_37=='\\') ) {s = 36;} + + else if ( (LA37_37=='-') ) {s = 37;} + + else if ( (LA37_37=='+') ) {s = 38;} + + else if ( (LA37_37=='*'||LA37_37=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 34 : + int LA37_38 = input.LA(1); + + s = -1; + if ( ((LA37_38 >= '\u0000' && LA37_38 <= '\b')||(LA37_38 >= '\u000B' && LA37_38 <= '\f')||(LA37_38 >= '\u000E' && LA37_38 <= '\u001F')||(LA37_38 >= '#' && LA37_38 <= '&')||LA37_38==','||(LA37_38 >= '.' && LA37_38 <= '9')||(LA37_38 >= ';' && LA37_38 <= '>')||(LA37_38 >= '@' && LA37_38 <= 'Z')||(LA37_38 >= '_' && LA37_38 <= 'z')||LA37_38=='|'||(LA37_38 >= '\u007F' && LA37_38 <= '\u2FFF')||(LA37_38 >= '\u3001' && LA37_38 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_38=='\\') ) {s = 36;} + + else if ( (LA37_38=='-') ) {s = 37;} + + else if ( (LA37_38=='+') ) {s = 38;} + + else if ( (LA37_38=='*'||LA37_38=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 35 : + int LA37_57 = input.LA(1); + + s = -1; + if ( ((LA37_57 >= '\u0000' && LA37_57 <= '\b')||(LA37_57 >= '\u000B' && LA37_57 <= '\f')||(LA37_57 >= '\u000E' && LA37_57 <= '\u001F')||(LA37_57 >= '#' && LA37_57 <= '&')||LA37_57==','||(LA37_57 >= '.' 
&& LA37_57 <= '9')||(LA37_57 >= ';' && LA37_57 <= '>')||(LA37_57 >= '@' && LA37_57 <= 'Z')||(LA37_57 >= '_' && LA37_57 <= 'z')||LA37_57=='|'||(LA37_57 >= '\u007F' && LA37_57 <= '\u2FFF')||(LA37_57 >= '\u3001' && LA37_57 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_57=='\\') ) {s = 36;} + + else if ( (LA37_57=='-') ) {s = 37;} + + else if ( (LA37_57=='+') ) {s = 38;} + + else if ( (LA37_57=='*'||LA37_57=='?') ) {s = 27;} + + else s = 62; + + if ( s>=0 ) return s; + break; + case 36 : + int LA37_55 = input.LA(1); + + s = -1; + if ( ((LA37_55 >= '\u0000' && LA37_55 <= '\b')||(LA37_55 >= '\u000B' && LA37_55 <= '\f')||(LA37_55 >= '\u000E' && LA37_55 <= '\u001F')||(LA37_55 >= '#' && LA37_55 <= '&')||LA37_55==','||(LA37_55 >= '.' && LA37_55 <= '9')||(LA37_55 >= ';' && LA37_55 <= '>')||(LA37_55 >= '@' && LA37_55 <= 'Z')||(LA37_55 >= '_' && LA37_55 <= 'z')||LA37_55=='|'||(LA37_55 >= '\u007F' && LA37_55 <= '\u2FFF')||(LA37_55 >= '\u3001' && LA37_55 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_55=='\\') ) {s = 36;} + + else if ( (LA37_55=='-') ) {s = 37;} + + else if ( (LA37_55=='+') ) {s = 38;} + + else if ( (LA37_55=='*'||LA37_55=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 37 : + int LA37_8 = input.LA(1); + + s = -1; + if ( ((LA37_8 >= '\u0000' && LA37_8 <= '\b')||(LA37_8 >= '\u000B' && LA37_8 <= '\f')||(LA37_8 >= '\u000E' && LA37_8 <= '\u001F')||(LA37_8 >= '#' && LA37_8 <= '&')||(LA37_8 >= '+' && LA37_8 <= '9')||(LA37_8 >= ';' && LA37_8 <= '>')||(LA37_8 >= '@' && LA37_8 <= 'Z')||LA37_8=='\\'||(LA37_8 >= '_' && LA37_8 <= 'z')||LA37_8=='|'||(LA37_8 >= '\u007F' && LA37_8 <= '\u2FFF')||(LA37_8 >= '\u3001' && LA37_8 <= '\uFFFF')) ) {s = 27;} + + else s = 26; + + if ( s>=0 ) return s; + break; + case 38 : + int LA37_70 = input.LA(1); + + s = -1; + if ( ((LA37_70 >= '0' && LA37_70 <= '9')) ) {s = 72;} + + else if ( ((LA37_70 >= '\u0000' && LA37_70 <= '\b')||(LA37_70 >= '\u000B' && LA37_70 <= '\f')||(LA37_70 >= '\u000E' && LA37_70 <= '\u001F')||(LA37_70 >= '#' && LA37_70 <= '&')||LA37_70==','||(LA37_70 >= '.' && LA37_70 <= '/')||(LA37_70 >= ';' && LA37_70 <= '>')||(LA37_70 >= '@' && LA37_70 <= 'Z')||(LA37_70 >= '_' && LA37_70 <= 'z')||LA37_70=='|'||(LA37_70 >= '\u007F' && LA37_70 <= '\u2FFF')||(LA37_70 >= '\u3001' && LA37_70 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_70=='\\') ) {s = 36;} + + else if ( (LA37_70=='-') ) {s = 37;} + + else if ( (LA37_70=='+') ) {s = 38;} + + else if ( (LA37_70=='*'||LA37_70=='?') ) {s = 27;} + + else s = 71; + + if ( s>=0 ) return s; + break; + case 39 : + int LA37_72 = input.LA(1); + + s = -1; + if ( ((LA37_72 >= '0' && LA37_72 <= '9')) ) {s = 73;} + + else if ( ((LA37_72 >= '\u0000' && LA37_72 <= '\b')||(LA37_72 >= '\u000B' && LA37_72 <= '\f')||(LA37_72 >= '\u000E' && LA37_72 <= '\u001F')||(LA37_72 >= '#' && LA37_72 <= '&')||LA37_72==','||(LA37_72 >= '.' 
&& LA37_72 <= '/')||(LA37_72 >= ';' && LA37_72 <= '>')||(LA37_72 >= '@' && LA37_72 <= 'Z')||(LA37_72 >= '_' && LA37_72 <= 'z')||LA37_72=='|'||(LA37_72 >= '\u007F' && LA37_72 <= '\u2FFF')||(LA37_72 >= '\u3001' && LA37_72 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_72=='\\') ) {s = 36;} + + else if ( (LA37_72=='-') ) {s = 37;} + + else if ( (LA37_72=='+') ) {s = 38;} + + else if ( (LA37_72=='*'||LA37_72=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 40 : + int LA37_45 = input.LA(1); + + s = -1; + if ( (LA37_45=='T'||LA37_45=='t') ) {s = 57;} + + else if ( ((LA37_45 >= '\u0000' && LA37_45 <= '\b')||(LA37_45 >= '\u000B' && LA37_45 <= '\f')||(LA37_45 >= '\u000E' && LA37_45 <= '\u001F')||(LA37_45 >= '#' && LA37_45 <= '&')||LA37_45==','||(LA37_45 >= '.' && LA37_45 <= '9')||(LA37_45 >= ';' && LA37_45 <= '>')||(LA37_45 >= '@' && LA37_45 <= 'S')||(LA37_45 >= 'U' && LA37_45 <= 'Z')||(LA37_45 >= '_' && LA37_45 <= 's')||(LA37_45 >= 'u' && LA37_45 <= 'z')||LA37_45=='|'||(LA37_45 >= '\u007F' && LA37_45 <= '\u2FFF')||(LA37_45 >= '\u3001' && LA37_45 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_45=='\\') ) {s = 36;} + + else if ( (LA37_45=='-') ) {s = 37;} + + else if ( (LA37_45=='+') ) {s = 38;} + + else if ( (LA37_45=='*'||LA37_45=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 41 : + int LA37_46 = input.LA(1); + + s = -1; + if ( ((LA37_46 >= '0' && LA37_46 <= '9')) ) {s = 58;} + + else if ( ((LA37_46 >= '\u0000' && LA37_46 <= '\b')||(LA37_46 >= '\u000B' && LA37_46 <= '\f')||(LA37_46 >= '\u000E' && LA37_46 <= '\u001F')||(LA37_46 >= '#' && LA37_46 <= '&')||LA37_46==','||(LA37_46 >= '.' && LA37_46 <= '/')||(LA37_46 >= ';' && LA37_46 <= '>')||(LA37_46 >= '@' && LA37_46 <= 'Z')||(LA37_46 >= '_' && LA37_46 <= 'z')||LA37_46=='|'||(LA37_46 >= '\u007F' && LA37_46 <= '\u2FFF')||(LA37_46 >= '\u3001' && LA37_46 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_46=='\\') ) {s = 36;} + + else if ( (LA37_46=='-') ) {s = 37;} + + else if ( (LA37_46=='+') ) {s = 38;} + + else if ( (LA37_46=='*'||LA37_46=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 42 : + int LA37_59 = input.LA(1); + + s = -1; + if ( (LA37_59=='.') ) {s = 66;} + + else if ( ((LA37_59 >= '0' && LA37_59 <= '9')) ) {s = 59;} + + else if ( ((LA37_59 >= '\u0000' && LA37_59 <= '\b')||(LA37_59 >= '\u000B' && LA37_59 <= '\f')||(LA37_59 >= '\u000E' && LA37_59 <= '\u001F')||(LA37_59 >= '#' && LA37_59 <= '&')||LA37_59==','||LA37_59=='/'||(LA37_59 >= ';' && LA37_59 <= '>')||(LA37_59 >= '@' && LA37_59 <= 'Z')||(LA37_59 >= '_' && LA37_59 <= 'z')||LA37_59=='|'||(LA37_59 >= '\u007F' && LA37_59 <= '\u2FFF')||(LA37_59 >= '\u3001' && LA37_59 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_59=='\\') ) {s = 36;} + + else if ( (LA37_59=='-') ) {s = 37;} + + else if ( (LA37_59=='+') ) {s = 38;} + + else if ( (LA37_59=='*'||LA37_59=='?') ) {s = 27;} + + else s = 47; + + if ( s>=0 ) return s; + break; + case 43 : + int LA37_60 = input.LA(1); + + s = -1; + if ( ((LA37_60 >= '0' && LA37_60 <= '9')) ) {s = 67;} + + else if ( ((LA37_60 >= '.' 
&& LA37_60 <= '/')) ) {s = 64;} + + else if ( (LA37_60=='-') ) {s = 65;} + + else if ( ((LA37_60 >= '\u0000' && LA37_60 <= '\b')||(LA37_60 >= '\u000B' && LA37_60 <= '\f')||(LA37_60 >= '\u000E' && LA37_60 <= '\u001F')||(LA37_60 >= '#' && LA37_60 <= '&')||LA37_60==','||(LA37_60 >= ';' && LA37_60 <= '>')||(LA37_60 >= '@' && LA37_60 <= 'Z')||(LA37_60 >= '_' && LA37_60 <= 'z')||LA37_60=='|'||(LA37_60 >= '\u007F' && LA37_60 <= '\u2FFF')||(LA37_60 >= '\u3001' && LA37_60 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_60=='\\') ) {s = 36;} + + else if ( (LA37_60=='+') ) {s = 38;} + + else if ( (LA37_60=='*'||LA37_60=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 44 : + int LA37_58 = input.LA(1); + + s = -1; + if ( ((LA37_58 >= '0' && LA37_58 <= '9')) ) {s = 63;} + + else if ( ((LA37_58 >= '.' && LA37_58 <= '/')) ) {s = 64;} + + else if ( (LA37_58=='-') ) {s = 65;} + + else if ( ((LA37_58 >= '\u0000' && LA37_58 <= '\b')||(LA37_58 >= '\u000B' && LA37_58 <= '\f')||(LA37_58 >= '\u000E' && LA37_58 <= '\u001F')||(LA37_58 >= '#' && LA37_58 <= '&')||LA37_58==','||(LA37_58 >= ';' && LA37_58 <= '>')||(LA37_58 >= '@' && LA37_58 <= 'Z')||(LA37_58 >= '_' && LA37_58 <= 'z')||LA37_58=='|'||(LA37_58 >= '\u007F' && LA37_58 <= '\u2FFF')||(LA37_58 >= '\u3001' && LA37_58 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_58=='\\') ) {s = 36;} + + else if ( (LA37_58=='+') ) {s = 38;} + + else if ( (LA37_58=='*'||LA37_58=='?') ) {s = 27;} + + else s = 47; + + if ( s>=0 ) return s; + break; + case 45 : + int LA37_51 = input.LA(1); + + s = -1; + if ( ((LA37_51 >= '\u0000' && LA37_51 <= '\b')||(LA37_51 >= '\u000B' && LA37_51 <= '\f')||(LA37_51 >= '\u000E' && LA37_51 <= '\u001F')||(LA37_51 >= '#' && LA37_51 <= '&')||LA37_51==','||(LA37_51 >= '.' && LA37_51 <= '9')||(LA37_51 >= ';' && LA37_51 <= '>')||(LA37_51 >= '@' && LA37_51 <= 'Z')||(LA37_51 >= '_' && LA37_51 <= 'z')||LA37_51=='|'||(LA37_51 >= '\u007F' && LA37_51 <= '\u2FFF')||(LA37_51 >= '\u3001' && LA37_51 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_51=='\\') ) {s = 36;} + + else if ( (LA37_51=='-') ) {s = 37;} + + else if ( (LA37_51=='+') ) {s = 38;} + + else if ( (LA37_51=='*'||LA37_51=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 46 : + int LA37_41 = input.LA(1); + + s = -1; + if ( ((LA37_41 >= '\u0000' && LA37_41 <= '\b')||(LA37_41 >= '\u000B' && LA37_41 <= '\f')||(LA37_41 >= '\u000E' && LA37_41 <= '\u001F')||(LA37_41 >= '#' && LA37_41 <= '&')||LA37_41==','||(LA37_41 >= '.' 
&& LA37_41 <= '9')||(LA37_41 >= ';' && LA37_41 <= '>')||(LA37_41 >= '@' && LA37_41 <= 'Z')||(LA37_41 >= '_' && LA37_41 <= 'z')||LA37_41=='|'||(LA37_41 >= '\u007F' && LA37_41 <= '\u2FFF')||(LA37_41 >= '\u3001' && LA37_41 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA37_41=='\\') ) {s = 36;} + + else if ( (LA37_41=='-') ) {s = 37;} + + else if ( (LA37_41=='+') ) {s = 38;} + + else if ( (LA37_41=='*'||LA37_41=='?') ) {s = 27;} + + else s = 40; + + if ( s>=0 ) return s; + break; + } + NoViableAltException nvae = + new NoViableAltException(getDescription(), 37, _s, input); + error(nvae); + throw nvae; + } + + } + + +} \ No newline at end of file Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammarLexer.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammarLexer.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/ExtendedLuceneGrammarLexer.java (revision 0) @@ -0,0 +1,4153 @@ +// $ANTLR 3.4 /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g 2013-06-27 18:55:45 + + package org.apache.lucene.queryparser.flexible.aqp.parser; + + +import org.antlr.runtime.*; +import java.util.Stack; +import java.util.List; +import java.util.ArrayList; + +@SuppressWarnings({"all", "warnings", "unchecked"}) +public class ExtendedLuceneGrammarLexer extends Lexer { + public static final int EOF=-1; + public static final int AMPER=4; + public static final int AND=5; + public static final int ATOM=6; + public static final int BOOST=7; + public static final int CARAT=8; + public static final int CLAUSE=9; + public static final int COLON=10; + public static final int DATE_TOKEN=11; + public static final int DQUOTE=12; + public static final int ESC_CHAR=13; + public static final int FIELD=14; + public static final int FUZZY=15; + public static final int INT=16; + public static final int LBRACK=17; + public static final int LCURLY=18; + public static final int LPAREN=19; + public static final int MINUS=20; + public static final int MODIFIER=21; + public static final int NEAR=22; + public static final int NOT=23; + public static final int NUMBER=24; + public static final int OPERATOR=25; + public static final int OR=26; + public static final int PHRASE=27; + public static final int PHRASE_ANYTHING=28; + public static final int PLUS=29; + public static final int QANYTHING=30; + public static final int QDATE=31; + public static final int QMARK=32; + public static final int QNORMAL=33; + public static final int QPHRASE=34; + public static final int QPHRASETRUNC=35; + public static final int QRANGEEX=36; + public static final int QRANGEIN=37; + public static final int QTRUNCATED=38; + public static final int RBRACK=39; + public static final int RCURLY=40; + public static final int RPAREN=41; + public static final int SQUOTE=42; + public static final int STAR=43; + public static final int TERM_CHAR=44; + public static final int TERM_NORMAL=45; + public static final int TERM_START_CHAR=46; + public static final int TERM_TRUNCATED=47; + public static final int TILDE=48; + public static final int TMODIFIER=49; + public static final int TO=50; + public static final int VBAR=51; + public static final int WS=52; + + public void recover(RecognitionException re) { + // throw unchecked exception + throw new RuntimeException(re); + } + + + // delegates + // delegators + public Lexer[] getDelegates() 
{ + return new Lexer[] {}; + } + + public ExtendedLuceneGrammarLexer() {} + public ExtendedLuceneGrammarLexer(CharStream input) { + this(input, new RecognizerSharedState()); + } + public ExtendedLuceneGrammarLexer(CharStream input, RecognizerSharedState state) { + super(input,state); + } + public String getGrammarFileName() { return "/dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g"; } + + // $ANTLR start "LPAREN" + public final void mLPAREN() throws RecognitionException { + try { + int _type = LPAREN; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:363:9: ( '(' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:363:11: '(' + { + match('('); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "LPAREN" + + // $ANTLR start "RPAREN" + public final void mRPAREN() throws RecognitionException { + try { + int _type = RPAREN; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:365:9: ( ')' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:365:11: ')' + { + match(')'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "RPAREN" + + // $ANTLR start "LBRACK" + public final void mLBRACK() throws RecognitionException { + try { + int _type = LBRACK; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:367:9: ( '[' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:367:11: '[' + { + match('['); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "LBRACK" + + // $ANTLR start "RBRACK" + public final void mRBRACK() throws RecognitionException { + try { + int _type = RBRACK; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:369:9: ( ']' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:369:11: ']' + { + match(']'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "RBRACK" + + // $ANTLR start "COLON" + public final void mCOLON() throws RecognitionException { + try { + int _type = COLON; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:371:9: ( ':' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:371:11: ':' + { + match(':'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "COLON" + + // $ANTLR start "PLUS" + public final void mPLUS() throws RecognitionException { + try { + int _type = PLUS; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:373:7: ( '+' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:373:9: '+' + { + match('+'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "PLUS" + + // $ANTLR start "MINUS" + public final void mMINUS() throws RecognitionException { + try { + int _type = 
MINUS; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:375:7: ( ( '-' | '\\!' ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( input.LA(1)=='!'||input.LA(1)=='-' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "MINUS" + + // $ANTLR start "STAR" + public final void mSTAR() throws RecognitionException { + try { + int _type = STAR; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:377:7: ( '*' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:377:9: '*' + { + match('*'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "STAR" + + // $ANTLR start "QMARK" + public final void mQMARK() throws RecognitionException { + try { + int _type = QMARK; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:379:8: ( ( '?' )+ ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:379:10: ( '?' )+ + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:379:10: ( '?' )+ + int cnt1=0; + loop1: + do { + int alt1=2; + int LA1_0 = input.LA(1); + + if ( (LA1_0=='?') ) { + alt1=1; + } + + + switch (alt1) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:379:10: '?' + { + match('?'); + + } + break; + + default : + if ( cnt1 >= 1 ) break loop1; + EarlyExitException eee = + new EarlyExitException(1, input); + throw eee; + } + cnt1++; + } while (true); + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "QMARK" + + // $ANTLR start "VBAR" + public final void mVBAR() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:381:16: ( '|' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:381:18: '|' + { + match('|'); + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "VBAR" + + // $ANTLR start "AMPER" + public final void mAMPER() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:383:16: ( '&' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:383:18: '&' + { + match('&'); + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "AMPER" + + // $ANTLR start "LCURLY" + public final void mLCURLY() throws RecognitionException { + try { + int _type = LCURLY; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:385:9: ( '{' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:385:11: '{' + { + match('{'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "LCURLY" + + // $ANTLR start "RCURLY" + public final void mRCURLY() throws RecognitionException { + try { + int _type = RCURLY; + int _channel = DEFAULT_TOKEN_CHANNEL; + // 
/dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:387:9: ( '}' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:387:11: '}' + { + match('}'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "RCURLY" + + // $ANTLR start "CARAT" + public final void mCARAT() throws RecognitionException { + try { + int _type = CARAT; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:389:7: ( '^' ( ( INT )+ ( '.' ( INT )+ )? )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:389:9: '^' ( ( INT )+ ( '.' ( INT )+ )? )? + { + match('^'); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:389:13: ( ( INT )+ ( '.' ( INT )+ )? )? + int alt5=2; + int LA5_0 = input.LA(1); + + if ( ((LA5_0 >= '0' && LA5_0 <= '9')) ) { + alt5=1; + } + switch (alt5) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:389:14: ( INT )+ ( '.' ( INT )+ )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:389:14: ( INT )+ + int cnt2=0; + loop2: + do { + int alt2=2; + int LA2_0 = input.LA(1); + + if ( ((LA2_0 >= '0' && LA2_0 <= '9')) ) { + alt2=1; + } + + + switch (alt2) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt2 >= 1 ) break loop2; + EarlyExitException eee = + new EarlyExitException(2, input); + throw eee; + } + cnt2++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:389:19: ( '.' ( INT )+ )? + int alt4=2; + int LA4_0 = input.LA(1); + + if ( (LA4_0=='.') ) { + alt4=1; + } + switch (alt4) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:389:20: '.' ( INT )+ + { + match('.'); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:389:24: ( INT )+ + int cnt3=0; + loop3: + do { + int alt3=2; + int LA3_0 = input.LA(1); + + if ( ((LA3_0 >= '0' && LA3_0 <= '9')) ) { + alt3=1; + } + + + switch (alt3) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt3 >= 1 ) break loop3; + EarlyExitException eee = + new EarlyExitException(3, input); + throw eee; + } + cnt3++; + } while (true); + + + } + break; + + } + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "CARAT" + + // $ANTLR start "TILDE" + public final void mTILDE() throws RecognitionException { + try { + int _type = TILDE; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:391:7: ( '~' ( ( INT )+ ( '.' ( INT )+ )? )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:391:9: '~' ( ( INT )+ ( '.' ( INT )+ )? )? 
+ { + match('~'); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:391:13: ( ( INT )+ ( '.' ( INT )+ )? )? + int alt9=2; + int LA9_0 = input.LA(1); + + if ( ((LA9_0 >= '0' && LA9_0 <= '9')) ) { + alt9=1; + } + switch (alt9) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:391:14: ( INT )+ ( '.' ( INT )+ )? + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:391:14: ( INT )+ + int cnt6=0; + loop6: + do { + int alt6=2; + int LA6_0 = input.LA(1); + + if ( ((LA6_0 >= '0' && LA6_0 <= '9')) ) { + alt6=1; + } + + + switch (alt6) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt6 >= 1 ) break loop6; + EarlyExitException eee = + new EarlyExitException(6, input); + throw eee; + } + cnt6++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:391:19: ( '.' ( INT )+ )? + int alt8=2; + int LA8_0 = input.LA(1); + + if ( (LA8_0=='.') ) { + alt8=1; + } + switch (alt8) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:391:20: '.' ( INT )+ + { + match('.'); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:391:24: ( INT )+ + int cnt7=0; + loop7: + do { + int alt7=2; + int LA7_0 = input.LA(1); + + if ( ((LA7_0 >= '0' && LA7_0 <= '9')) ) { + alt7=1; + } + + + switch (alt7) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt7 >= 1 ) break loop7; + EarlyExitException eee = + new EarlyExitException(7, input); + throw eee; + } + cnt7++; + } while (true); + + + } + break; + + } + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TILDE" + + // $ANTLR start "DQUOTE" + public final void mDQUOTE() throws RecognitionException { + try { + int _type = DQUOTE; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:394:2: ( '\\\"' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:394:4: '\\\"' + { + match('\"'); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "DQUOTE" + + // $ANTLR start "SQUOTE" + public final void mSQUOTE() throws RecognitionException { + try { + int _type = SQUOTE; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:397:2: ( '\\'' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:397:4: '\\'' + { + match('\''); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "SQUOTE" + + // $ANTLR start "TO" + public final void mTO() throws RecognitionException { + try { + int _type = TO; + int _channel = DEFAULT_TOKEN_CHANNEL; + // 
/dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:402:4: ( 'TO' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:402:6: 'TO' + { + match("TO"); + + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TO" + + // $ANTLR start "AND" + public final void mAND() throws RecognitionException { + try { + int _type = AND; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:405:7: ( ( ( 'a' | 'A' ) ( 'n' | 'N' ) ( 'd' | 'D' ) | ( AMPER ( AMPER )? ) ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:405:9: ( ( 'a' | 'A' ) ( 'n' | 'N' ) ( 'd' | 'D' ) | ( AMPER ( AMPER )? ) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:405:9: ( ( 'a' | 'A' ) ( 'n' | 'N' ) ( 'd' | 'D' ) | ( AMPER ( AMPER )? ) ) + int alt11=2; + int LA11_0 = input.LA(1); + + if ( (LA11_0=='A'||LA11_0=='a') ) { + alt11=1; + } + else if ( (LA11_0=='&') ) { + alt11=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 11, 0, input); + + throw nvae; + + } + switch (alt11) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:405:10: ( 'a' | 'A' ) ( 'n' | 'N' ) ( 'd' | 'D' ) + { + if ( input.LA(1)=='A'||input.LA(1)=='a' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='N'||input.LA(1)=='n' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='D'||input.LA(1)=='d' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:405:48: ( AMPER ( AMPER )? ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:405:48: ( AMPER ( AMPER )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:405:49: AMPER ( AMPER )? + { + mAMPER(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:405:55: ( AMPER )? + int alt10=2; + int LA10_0 = input.LA(1); + + if ( (LA10_0=='&') ) { + alt10=1; + } + switch (alt10) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( input.LA(1)=='&' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + } + + + } + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "AND" + + // $ANTLR start "OR" + public final void mOR() throws RecognitionException { + try { + int _type = OR; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:406:5: ( ( ( 'o' | 'O' ) ( 'r' | 'R' ) | ( VBAR ( VBAR )? ) ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:406:7: ( ( 'o' | 'O' ) ( 'r' | 'R' ) | ( VBAR ( VBAR )? 
) ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:406:7: ( ( 'o' | 'O' ) ( 'r' | 'R' ) | ( VBAR ( VBAR )? ) ) + int alt13=2; + int LA13_0 = input.LA(1); + + if ( (LA13_0=='O'||LA13_0=='o') ) { + alt13=1; + } + else if ( (LA13_0=='|') ) { + alt13=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 13, 0, input); + + throw nvae; + + } + switch (alt13) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:406:8: ( 'o' | 'O' ) ( 'r' | 'R' ) + { + if ( input.LA(1)=='O'||input.LA(1)=='o' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='R'||input.LA(1)=='r' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:406:34: ( VBAR ( VBAR )? ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:406:34: ( VBAR ( VBAR )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:406:35: VBAR ( VBAR )? + { + mVBAR(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:406:40: ( VBAR )? + int alt12=2; + int LA12_0 = input.LA(1); + + if ( (LA12_0=='|') ) { + alt12=1; + } + switch (alt12) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( input.LA(1)=='|' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + } + + + } + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "OR" + + // $ANTLR start "NOT" + public final void mNOT() throws RecognitionException { + try { + int _type = NOT; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:407:7: ( ( 'n' | 'N' ) ( 'o' | 'O' ) ( 't' | 'T' ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:407:9: ( 'n' | 'N' ) ( 'o' | 'O' ) ( 't' | 'T' ) + { + if ( input.LA(1)=='N'||input.LA(1)=='n' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='O'||input.LA(1)=='o' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='T'||input.LA(1)=='t' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "NOT" + + // $ANTLR start "NEAR" + public final void mNEAR() throws RecognitionException { + try { + int _type = NEAR; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:408:7: ( ( 'n' | 'N' ) ( 'e' | 'E' ) ( 'a' | 'A' ) ( 'r' | 'R' ) ( '0' .. '9' )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:408:9: ( 'n' | 'N' ) ( 'e' | 'E' ) ( 'a' | 'A' ) ( 'r' | 'R' ) ( '0' .. 
'9' )* + { + if ( input.LA(1)=='N'||input.LA(1)=='n' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='E'||input.LA(1)=='e' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='A'||input.LA(1)=='a' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + if ( input.LA(1)=='R'||input.LA(1)=='r' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:408:57: ( '0' .. '9' )* + loop14: + do { + int alt14=2; + int LA14_0 = input.LA(1); + + if ( ((LA14_0 >= '0' && LA14_0 <= '9')) ) { + alt14=1; + } + + + switch (alt14) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + break loop14; + } + } while (true); + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "NEAR" + + // $ANTLR start "WS" + public final void mWS() throws RecognitionException { + try { + int _type = WS; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:411:5: ( ( ' ' | '\\t' | '\\r' | '\\n' | '\\u3000' ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:411:9: ( ' ' | '\\t' | '\\r' | '\\n' | '\\u3000' ) + { + if ( (input.LA(1) >= '\t' && input.LA(1) <= '\n')||input.LA(1)=='\r'||input.LA(1)==' '||input.LA(1)=='\u3000' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + _channel=HIDDEN; + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "WS" + + // $ANTLR start "INT" + public final void mINT() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:432:13: ( '0' .. '9' ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "INT" + + // $ANTLR start "ESC_CHAR" + public final void mESC_CHAR() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:435:18: ( '\\\\' . ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:435:21: '\\\\' . 
+ { + match('\\'); + + matchAny(); + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "ESC_CHAR" + + // $ANTLR start "TERM_START_CHAR" + public final void mTERM_START_CHAR() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:439:2: ( (~ ( ' ' | '\\t' | '\\n' | '\\r' | '\\u3000' | '\\'' | '\\\"' | '(' | ')' | '[' | ']' | '{' | '}' | '+' | '-' | '!' | ':' | '~' | '^' | '?' | '*' | '\\\\' ) | ESC_CHAR ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:440:2: (~ ( ' ' | '\\t' | '\\n' | '\\r' | '\\u3000' | '\\'' | '\\\"' | '(' | ')' | '[' | ']' | '{' | '}' | '+' | '-' | '!' | ':' | '~' | '^' | '?' | '*' | '\\\\' ) | ESC_CHAR ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:440:2: (~ ( ' ' | '\\t' | '\\n' | '\\r' | '\\u3000' | '\\'' | '\\\"' | '(' | ')' | '[' | ']' | '{' | '}' | '+' | '-' | '!' | ':' | '~' | '^' | '?' | '*' | '\\\\' ) | ESC_CHAR ) + int alt15=2; + int LA15_0 = input.LA(1); + + if ( ((LA15_0 >= '\u0000' && LA15_0 <= '\b')||(LA15_0 >= '\u000B' && LA15_0 <= '\f')||(LA15_0 >= '\u000E' && LA15_0 <= '\u001F')||(LA15_0 >= '#' && LA15_0 <= '&')||LA15_0==','||(LA15_0 >= '.' && LA15_0 <= '9')||(LA15_0 >= ';' && LA15_0 <= '>')||(LA15_0 >= '@' && LA15_0 <= 'Z')||(LA15_0 >= '_' && LA15_0 <= 'z')||LA15_0=='|'||(LA15_0 >= '\u007F' && LA15_0 <= '\u2FFF')||(LA15_0 >= '\u3001' && LA15_0 <= '\uFFFF')) ) { + alt15=1; + } + else if ( (LA15_0=='\\') ) { + alt15=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 15, 0, input); + + throw nvae; + + } + switch (alt15) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:440:3: ~ ( ' ' | '\\t' | '\\n' | '\\r' | '\\u3000' | '\\'' | '\\\"' | '(' | ')' | '[' | ']' | '{' | '}' | '+' | '-' | '!' | ':' | '~' | '^' | '?' | '*' | '\\\\' ) + { + if ( (input.LA(1) >= '\u0000' && input.LA(1) <= '\b')||(input.LA(1) >= '\u000B' && input.LA(1) <= '\f')||(input.LA(1) >= '\u000E' && input.LA(1) <= '\u001F')||(input.LA(1) >= '#' && input.LA(1) <= '&')||input.LA(1)==','||(input.LA(1) >= '.' 
&& input.LA(1) <= '9')||(input.LA(1) >= ';' && input.LA(1) <= '>')||(input.LA(1) >= '@' && input.LA(1) <= 'Z')||(input.LA(1) >= '_' && input.LA(1) <= 'z')||input.LA(1)=='|'||(input.LA(1) >= '\u007F' && input.LA(1) <= '\u2FFF')||(input.LA(1) >= '\u3001' && input.LA(1) <= '\uFFFF') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:446:5: ESC_CHAR + { + mESC_CHAR(); + + + } + break; + + } + + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TERM_START_CHAR" + + // $ANTLR start "TERM_CHAR" + public final void mTERM_CHAR() throws RecognitionException { + try { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:450:2: ( ( TERM_START_CHAR | '-' | '+' ) ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:451:2: ( TERM_START_CHAR | '-' | '+' ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:451:2: ( TERM_START_CHAR | '-' | '+' ) + int alt16=3; + int LA16_0 = input.LA(1); + + if ( ((LA16_0 >= '\u0000' && LA16_0 <= '\b')||(LA16_0 >= '\u000B' && LA16_0 <= '\f')||(LA16_0 >= '\u000E' && LA16_0 <= '\u001F')||(LA16_0 >= '#' && LA16_0 <= '&')||LA16_0==','||(LA16_0 >= '.' && LA16_0 <= '9')||(LA16_0 >= ';' && LA16_0 <= '>')||(LA16_0 >= '@' && LA16_0 <= 'Z')||LA16_0=='\\'||(LA16_0 >= '_' && LA16_0 <= 'z')||LA16_0=='|'||(LA16_0 >= '\u007F' && LA16_0 <= '\u2FFF')||(LA16_0 >= '\u3001' && LA16_0 <= '\uFFFF')) ) { + alt16=1; + } + else if ( (LA16_0=='-') ) { + alt16=2; + } + else if ( (LA16_0=='+') ) { + alt16=3; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 16, 0, input); + + throw nvae; + + } + switch (alt16) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:451:3: TERM_START_CHAR + { + mTERM_START_CHAR(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:451:21: '-' + { + match('-'); + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:451:27: '+' + { + match('+'); + + } + break; + + } + + + } + + + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TERM_CHAR" + + // $ANTLR start "NUMBER" + public final void mNUMBER() throws RecognitionException { + try { + int _type = NUMBER; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:456:2: ( ( INT )+ ( '.' ( INT )+ )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:457:2: ( INT )+ ( '.' ( INT )+ )? 
+ { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:457:2: ( INT )+ + int cnt17=0; + loop17: + do { + int alt17=2; + int LA17_0 = input.LA(1); + + if ( ((LA17_0 >= '0' && LA17_0 <= '9')) ) { + alt17=1; + } + + + switch (alt17) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt17 >= 1 ) break loop17; + EarlyExitException eee = + new EarlyExitException(17, input); + throw eee; + } + cnt17++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:457:7: ( '.' ( INT )+ )? + int alt19=2; + int LA19_0 = input.LA(1); + + if ( (LA19_0=='.') ) { + alt19=1; + } + switch (alt19) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:457:8: '.' ( INT )+ + { + match('.'); + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:457:12: ( INT )+ + int cnt18=0; + loop18: + do { + int alt18=2; + int LA18_0 = input.LA(1); + + if ( ((LA18_0 >= '0' && LA18_0 <= '9')) ) { + alt18=1; + } + + + switch (alt18) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt18 >= 1 ) break loop18; + EarlyExitException eee = + new EarlyExitException(18, input); + throw eee; + } + cnt18++; + } while (true); + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "NUMBER" + + // $ANTLR start "DATE_TOKEN" + public final void mDATE_TOKEN() throws RecognitionException { + try { + int _type = DATE_TOKEN; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:461:2: ( INT ( INT )? ( '/' | '-' | '.' ) INT ( INT )? ( '/' | '-' | '.' ) INT INT ( INT INT )? ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:462:2: INT ( INT )? ( '/' | '-' | '.' ) INT ( INT )? ( '/' | '-' | '.' ) INT INT ( INT INT )? + { + mINT(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:462:6: ( INT )? + int alt20=2; + int LA20_0 = input.LA(1); + + if ( ((LA20_0 >= '0' && LA20_0 <= '9')) ) { + alt20=1; + } + switch (alt20) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + } + + + if ( (input.LA(1) >= '-' && input.LA(1) <= '/') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + mINT(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:462:29: ( INT )? 
+ int alt21=2; + int LA21_0 = input.LA(1); + + if ( ((LA21_0 >= '0' && LA21_0 <= '9')) ) { + alt21=1; + } + switch (alt21) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g: + { + if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + } + + + if ( (input.LA(1) >= '-' && input.LA(1) <= '/') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + mINT(); + + + mINT(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:462:56: ( INT INT )? + int alt22=2; + int LA22_0 = input.LA(1); + + if ( ((LA22_0 >= '0' && LA22_0 <= '9')) ) { + alt22=1; + } + switch (alt22) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:462:57: INT INT + { + mINT(); + + + mINT(); + + + } + break; + + } + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "DATE_TOKEN" + + // $ANTLR start "TERM_NORMAL" + public final void mTERM_NORMAL() throws RecognitionException { + try { + int _type = TERM_NORMAL; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:466:2: ( TERM_START_CHAR ( TERM_CHAR )* ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:467:2: TERM_START_CHAR ( TERM_CHAR )* + { + mTERM_START_CHAR(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:467:18: ( TERM_CHAR )* + loop23: + do { + int alt23=2; + int LA23_0 = input.LA(1); + + if ( ((LA23_0 >= '\u0000' && LA23_0 <= '\b')||(LA23_0 >= '\u000B' && LA23_0 <= '\f')||(LA23_0 >= '\u000E' && LA23_0 <= '\u001F')||(LA23_0 >= '#' && LA23_0 <= '&')||(LA23_0 >= '+' && LA23_0 <= '9')||(LA23_0 >= ';' && LA23_0 <= '>')||(LA23_0 >= '@' && LA23_0 <= 'Z')||LA23_0=='\\'||(LA23_0 >= '_' && LA23_0 <= 'z')||LA23_0=='|'||(LA23_0 >= '\u007F' && LA23_0 <= '\u2FFF')||(LA23_0 >= '\u3001' && LA23_0 <= '\uFFFF')) ) { + alt23=1; + } + + + switch (alt23) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:467:20: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + break loop23; + } + } while (true); + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TERM_NORMAL" + + // $ANTLR start "TERM_TRUNCATED" + public final void mTERM_TRUNCATED() throws RecognitionException { + try { + int _type = TERM_TRUNCATED; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:471:15: ( ( STAR | QMARK ) ( ( TERM_CHAR )+ ( QMARK | STAR ) )+ ( TERM_CHAR )* | TERM_START_CHAR ( ( TERM_CHAR )* ( QMARK | STAR ) )+ ( TERM_CHAR )* | ( STAR | QMARK ) ( TERM_CHAR )+ ) + int alt35=3; + alt35 = dfa35.predict(input); + switch (alt35) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:2: ( STAR | QMARK ) ( ( TERM_CHAR )+ ( QMARK | STAR ) )+ ( TERM_CHAR )* + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:2: ( STAR | QMARK ) + int alt24=2; + int LA24_0 = input.LA(1); + + if ( (LA24_0=='*') ) { + alt24=1; + } + else if ( (LA24_0=='?') ) { + alt24=2; + } + else { + 
NoViableAltException nvae = + new NoViableAltException("", 24, 0, input); + + throw nvae; + + } + switch (alt24) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:3: STAR + { + mSTAR(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:8: QMARK + { + mQMARK(); + + + } + break; + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:15: ( ( TERM_CHAR )+ ( QMARK | STAR ) )+ + int cnt27=0; + loop27: + do { + int alt27=2; + alt27 = dfa27.predict(input); + switch (alt27) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:16: ( TERM_CHAR )+ ( QMARK | STAR ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:16: ( TERM_CHAR )+ + int cnt25=0; + loop25: + do { + int alt25=2; + int LA25_0 = input.LA(1); + + if ( ((LA25_0 >= '\u0000' && LA25_0 <= '\b')||(LA25_0 >= '\u000B' && LA25_0 <= '\f')||(LA25_0 >= '\u000E' && LA25_0 <= '\u001F')||(LA25_0 >= '#' && LA25_0 <= '&')||(LA25_0 >= '+' && LA25_0 <= '9')||(LA25_0 >= ';' && LA25_0 <= '>')||(LA25_0 >= '@' && LA25_0 <= 'Z')||LA25_0=='\\'||(LA25_0 >= '_' && LA25_0 <= 'z')||LA25_0=='|'||(LA25_0 >= '\u007F' && LA25_0 <= '\u2FFF')||(LA25_0 >= '\u3001' && LA25_0 <= '\uFFFF')) ) { + alt25=1; + } + + + switch (alt25) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:16: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + if ( cnt25 >= 1 ) break loop25; + EarlyExitException eee = + new EarlyExitException(25, input); + throw eee; + } + cnt25++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:27: ( QMARK | STAR ) + int alt26=2; + int LA26_0 = input.LA(1); + + if ( (LA26_0=='?') ) { + alt26=1; + } + else if ( (LA26_0=='*') ) { + alt26=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 26, 0, input); + + throw nvae; + + } + switch (alt26) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:28: QMARK + { + mQMARK(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:34: STAR + { + mSTAR(); + + + } + break; + + } + + + } + break; + + default : + if ( cnt27 >= 1 ) break loop27; + EarlyExitException eee = + new EarlyExitException(27, input); + throw eee; + } + cnt27++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:42: ( TERM_CHAR )* + loop28: + do { + int alt28=2; + int LA28_0 = input.LA(1); + + if ( ((LA28_0 >= '\u0000' && LA28_0 <= '\b')||(LA28_0 >= '\u000B' && LA28_0 <= '\f')||(LA28_0 >= '\u000E' && LA28_0 <= '\u001F')||(LA28_0 >= '#' && LA28_0 <= '&')||(LA28_0 >= '+' && LA28_0 <= '9')||(LA28_0 >= ';' && LA28_0 <= '>')||(LA28_0 >= '@' && LA28_0 <= 'Z')||LA28_0=='\\'||(LA28_0 >= '_' && LA28_0 <= 'z')||LA28_0=='|'||(LA28_0 >= '\u007F' && LA28_0 <= '\u2FFF')||(LA28_0 >= '\u3001' && LA28_0 <= '\uFFFF')) ) { + alt28=1; + } + + + switch (alt28) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:472:43: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + break loop28; + } + } while (true); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:473:4: TERM_START_CHAR ( ( TERM_CHAR )* ( QMARK | STAR ) )+ ( TERM_CHAR )* + { 
+ mTERM_START_CHAR(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:473:20: ( ( TERM_CHAR )* ( QMARK | STAR ) )+ + int cnt31=0; + loop31: + do { + int alt31=2; + alt31 = dfa31.predict(input); + switch (alt31) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:473:21: ( TERM_CHAR )* ( QMARK | STAR ) + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:473:21: ( TERM_CHAR )* + loop29: + do { + int alt29=2; + int LA29_0 = input.LA(1); + + if ( ((LA29_0 >= '\u0000' && LA29_0 <= '\b')||(LA29_0 >= '\u000B' && LA29_0 <= '\f')||(LA29_0 >= '\u000E' && LA29_0 <= '\u001F')||(LA29_0 >= '#' && LA29_0 <= '&')||(LA29_0 >= '+' && LA29_0 <= '9')||(LA29_0 >= ';' && LA29_0 <= '>')||(LA29_0 >= '@' && LA29_0 <= 'Z')||LA29_0=='\\'||(LA29_0 >= '_' && LA29_0 <= 'z')||LA29_0=='|'||(LA29_0 >= '\u007F' && LA29_0 <= '\u2FFF')||(LA29_0 >= '\u3001' && LA29_0 <= '\uFFFF')) ) { + alt29=1; + } + + + switch (alt29) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:473:21: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + break loop29; + } + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:473:32: ( QMARK | STAR ) + int alt30=2; + int LA30_0 = input.LA(1); + + if ( (LA30_0=='?') ) { + alt30=1; + } + else if ( (LA30_0=='*') ) { + alt30=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 30, 0, input); + + throw nvae; + + } + switch (alt30) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:473:33: QMARK + { + mQMARK(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:473:39: STAR + { + mSTAR(); + + + } + break; + + } + + + } + break; + + default : + if ( cnt31 >= 1 ) break loop31; + EarlyExitException eee = + new EarlyExitException(31, input); + throw eee; + } + cnt31++; + } while (true); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:473:47: ( TERM_CHAR )* + loop32: + do { + int alt32=2; + int LA32_0 = input.LA(1); + + if ( ((LA32_0 >= '\u0000' && LA32_0 <= '\b')||(LA32_0 >= '\u000B' && LA32_0 <= '\f')||(LA32_0 >= '\u000E' && LA32_0 <= '\u001F')||(LA32_0 >= '#' && LA32_0 <= '&')||(LA32_0 >= '+' && LA32_0 <= '9')||(LA32_0 >= ';' && LA32_0 <= '>')||(LA32_0 >= '@' && LA32_0 <= 'Z')||LA32_0=='\\'||(LA32_0 >= '_' && LA32_0 <= 'z')||LA32_0=='|'||(LA32_0 >= '\u007F' && LA32_0 <= '\u2FFF')||(LA32_0 >= '\u3001' && LA32_0 <= '\uFFFF')) ) { + alt32=1; + } + + + switch (alt32) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:473:48: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + break loop32; + } + } while (true); + + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:474:4: ( STAR | QMARK ) ( TERM_CHAR )+ + { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:474:4: ( STAR | QMARK ) + int alt33=2; + int LA33_0 = input.LA(1); + + if ( (LA33_0=='*') ) { + alt33=1; + } + else if ( (LA33_0=='?') ) { + alt33=2; + } + else { + NoViableAltException nvae = + new NoViableAltException("", 33, 0, input); + + throw nvae; + + } + switch (alt33) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:474:5: STAR + { + mSTAR(); + + + } + break; + case 2 : + // 
/dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:474:10: QMARK + { + mQMARK(); + + + } + break; + + } + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:474:17: ( TERM_CHAR )+ + int cnt34=0; + loop34: + do { + int alt34=2; + int LA34_0 = input.LA(1); + + if ( ((LA34_0 >= '\u0000' && LA34_0 <= '\b')||(LA34_0 >= '\u000B' && LA34_0 <= '\f')||(LA34_0 >= '\u000E' && LA34_0 <= '\u001F')||(LA34_0 >= '#' && LA34_0 <= '&')||(LA34_0 >= '+' && LA34_0 <= '9')||(LA34_0 >= ';' && LA34_0 <= '>')||(LA34_0 >= '@' && LA34_0 <= 'Z')||LA34_0=='\\'||(LA34_0 >= '_' && LA34_0 <= 'z')||LA34_0=='|'||(LA34_0 >= '\u007F' && LA34_0 <= '\u2FFF')||(LA34_0 >= '\u3001' && LA34_0 <= '\uFFFF')) ) { + alt34=1; + } + + + switch (alt34) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:474:17: TERM_CHAR + { + mTERM_CHAR(); + + + } + break; + + default : + if ( cnt34 >= 1 ) break loop34; + EarlyExitException eee = + new EarlyExitException(34, input); + throw eee; + } + cnt34++; + } while (true); + + + } + break; + + } + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "TERM_TRUNCATED" + + // $ANTLR start "PHRASE" + public final void mPHRASE() throws RecognitionException { + try { + int _type = PHRASE; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:479:2: ( DQUOTE ( ESC_CHAR |~ ( '\\\"' | '\\\\' | '?' | '*' ) )+ DQUOTE ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:480:2: DQUOTE ( ESC_CHAR |~ ( '\\\"' | '\\\\' | '?' | '*' ) )+ DQUOTE + { + mDQUOTE(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:480:9: ( ESC_CHAR |~ ( '\\\"' | '\\\\' | '?' | '*' ) )+ + int cnt36=0; + loop36: + do { + int alt36=3; + int LA36_0 = input.LA(1); + + if ( (LA36_0=='\\') ) { + alt36=1; + } + else if ( ((LA36_0 >= '\u0000' && LA36_0 <= '!')||(LA36_0 >= '#' && LA36_0 <= ')')||(LA36_0 >= '+' && LA36_0 <= '>')||(LA36_0 >= '@' && LA36_0 <= '[')||(LA36_0 >= ']' && LA36_0 <= '\uFFFF')) ) { + alt36=2; + } + + + switch (alt36) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:480:10: ESC_CHAR + { + mESC_CHAR(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:480:19: ~ ( '\\\"' | '\\\\' | '?' 
| '*' ) + { + if ( (input.LA(1) >= '\u0000' && input.LA(1) <= '!')||(input.LA(1) >= '#' && input.LA(1) <= ')')||(input.LA(1) >= '+' && input.LA(1) <= '>')||(input.LA(1) >= '@' && input.LA(1) <= '[')||(input.LA(1) >= ']' && input.LA(1) <= '\uFFFF') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt36 >= 1 ) break loop36; + EarlyExitException eee = + new EarlyExitException(36, input); + throw eee; + } + cnt36++; + } while (true); + + + mDQUOTE(); + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "PHRASE" + + // $ANTLR start "PHRASE_ANYTHING" + public final void mPHRASE_ANYTHING() throws RecognitionException { + try { + int _type = PHRASE_ANYTHING; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:483:17: ( DQUOTE ( ESC_CHAR |~ ( '\\\"' | '\\\\' ) )+ DQUOTE ) + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:484:2: DQUOTE ( ESC_CHAR |~ ( '\\\"' | '\\\\' ) )+ DQUOTE + { + mDQUOTE(); + + + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:484:9: ( ESC_CHAR |~ ( '\\\"' | '\\\\' ) )+ + int cnt37=0; + loop37: + do { + int alt37=3; + int LA37_0 = input.LA(1); + + if ( (LA37_0=='\\') ) { + alt37=1; + } + else if ( ((LA37_0 >= '\u0000' && LA37_0 <= '!')||(LA37_0 >= '#' && LA37_0 <= '[')||(LA37_0 >= ']' && LA37_0 <= '\uFFFF')) ) { + alt37=2; + } + + + switch (alt37) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:484:10: ESC_CHAR + { + mESC_CHAR(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:484:19: ~ ( '\\\"' | '\\\\' ) + { + if ( (input.LA(1) >= '\u0000' && input.LA(1) <= '!')||(input.LA(1) >= '#' && input.LA(1) <= '[')||(input.LA(1) >= ']' && input.LA(1) <= '\uFFFF') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + + + } + break; + + default : + if ( cnt37 >= 1 ) break loop37; + EarlyExitException eee = + new EarlyExitException(37, input); + throw eee; + } + cnt37++; + } while (true); + + + mDQUOTE(); + + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "PHRASE_ANYTHING" + + public void mTokens() throws RecognitionException { + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:8: ( LPAREN | RPAREN | LBRACK | RBRACK | COLON | PLUS | MINUS | STAR | QMARK | LCURLY | RCURLY | CARAT | TILDE | DQUOTE | SQUOTE | TO | AND | OR | NOT | NEAR | WS | NUMBER | DATE_TOKEN | TERM_NORMAL | TERM_TRUNCATED | PHRASE | PHRASE_ANYTHING ) + int alt38=27; + alt38 = dfa38.predict(input); + switch (alt38) { + case 1 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:10: LPAREN + { + mLPAREN(); + + + } + break; + case 2 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:17: RPAREN + { + mRPAREN(); + + + } + break; + case 3 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:24: LBRACK + { + mLBRACK(); + + + } + break; + case 4 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:31: RBRACK + { + mRBRACK(); + + + } + break; + case 5 : + // 
/dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:38: COLON + { + mCOLON(); + + + } + break; + case 6 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:44: PLUS + { + mPLUS(); + + + } + break; + case 7 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:49: MINUS + { + mMINUS(); + + + } + break; + case 8 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:55: STAR + { + mSTAR(); + + + } + break; + case 9 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:60: QMARK + { + mQMARK(); + + + } + break; + case 10 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:66: LCURLY + { + mLCURLY(); + + + } + break; + case 11 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:73: RCURLY + { + mRCURLY(); + + + } + break; + case 12 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:80: CARAT + { + mCARAT(); + + + } + break; + case 13 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:86: TILDE + { + mTILDE(); + + + } + break; + case 14 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:92: DQUOTE + { + mDQUOTE(); + + + } + break; + case 15 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:99: SQUOTE + { + mSQUOTE(); + + + } + break; + case 16 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:106: TO + { + mTO(); + + + } + break; + case 17 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:109: AND + { + mAND(); + + + } + break; + case 18 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:113: OR + { + mOR(); + + + } + break; + case 19 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:116: NOT + { + mNOT(); + + + } + break; + case 20 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:120: NEAR + { + mNEAR(); + + + } + break; + case 21 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:125: WS + { + mWS(); + + + } + break; + case 22 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:128: NUMBER + { + mNUMBER(); + + + } + break; + case 23 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:135: DATE_TOKEN + { + mDATE_TOKEN(); + + + } + break; + case 24 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:146: TERM_NORMAL + { + mTERM_NORMAL(); + + + } + break; + case 25 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:158: TERM_TRUNCATED + { + mTERM_TRUNCATED(); + + + } + break; + case 26 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:173: PHRASE + { + mPHRASE(); + + + } + break; + case 27 : + // /dvt/workspace/lucene_4x/lucene/queryparser/grammars/ExtendedLuceneGrammar.g:1:180: PHRASE_ANYTHING + { + mPHRASE_ANYTHING(); + + + } + break; + + } + + } + + + protected DFA35 dfa35 = new DFA35(this); + protected DFA27 dfa27 = new DFA27(this); + protected DFA31 dfa31 = new DFA31(this); + protected DFA38 dfa38 = new DFA38(this); + static final String DFA35_eotS = + "\4\uffff\1\10\1\uffff\2\10\2\uffff\1\10"; + static final String DFA35_eofS = + "\13\uffff"; + static 
final String DFA35_minS = + "\3\0\1\uffff\4\0\2\uffff\1\0"; + static final String DFA35_maxS = + "\3\uffff\1\uffff\4\uffff\2\uffff\1\uffff"; + static final String DFA35_acceptS = + "\3\uffff\1\2\4\uffff\1\3\1\1\1\uffff"; + static final String DFA35_specialS = + "\1\2\1\3\1\4\1\uffff\1\7\1\0\1\5\1\1\2\uffff\1\6}>"; + static final String[] DFA35_transitionS = { + "\11\3\2\uffff\2\3\1\uffff\22\3\3\uffff\4\3\3\uffff\1\1\1\uffff"+ + "\1\3\1\uffff\14\3\1\uffff\4\3\1\2\33\3\1\uffff\1\3\2\uffff\34"+ + "\3\1\uffff\1\3\2\uffff\u2f81\3\1\uffff\ucfff\3", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\4\uffff\1\7\1\4"+ + "\1\6\14\4\1\uffff\4\4\1\uffff\33\4\1\uffff\1\5\2\uffff\34\4"+ + "\1\uffff\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\4\uffff\1\7\1\4"+ + "\1\6\14\4\1\uffff\4\4\1\2\33\4\1\uffff\1\5\2\uffff\34\4\1\uffff"+ + "\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4", + "", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\3\uffff\1\11\1\7"+ + "\1\4\1\6\14\4\1\uffff\4\4\1\11\33\4\1\uffff\1\5\2\uffff\34\4"+ + "\1\uffff\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4", + "\0\12", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\3\uffff\1\11\1\7"+ + "\1\4\1\6\14\4\1\uffff\4\4\1\11\33\4\1\uffff\1\5\2\uffff\34\4"+ + "\1\uffff\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\3\uffff\1\11\1\7"+ + "\1\4\1\6\14\4\1\uffff\4\4\1\11\33\4\1\uffff\1\5\2\uffff\34\4"+ + "\1\uffff\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4", + "", + "", + "\11\4\2\uffff\2\4\1\uffff\22\4\3\uffff\4\4\3\uffff\1\11\1\7"+ + "\1\4\1\6\14\4\1\uffff\4\4\1\11\33\4\1\uffff\1\5\2\uffff\34\4"+ + "\1\uffff\1\4\2\uffff\u2f81\4\1\uffff\ucfff\4" + }; + + static final short[] DFA35_eot = DFA.unpackEncodedString(DFA35_eotS); + static final short[] DFA35_eof = DFA.unpackEncodedString(DFA35_eofS); + static final char[] DFA35_min = DFA.unpackEncodedStringToUnsignedChars(DFA35_minS); + static final char[] DFA35_max = DFA.unpackEncodedStringToUnsignedChars(DFA35_maxS); + static final short[] DFA35_accept = DFA.unpackEncodedString(DFA35_acceptS); + static final short[] DFA35_special = DFA.unpackEncodedString(DFA35_specialS); + static final short[][] DFA35_transition; + + static { + int numStates = DFA35_transitionS.length; + DFA35_transition = new short[numStates][]; + for (int i=0; i= '\u0000' && LA35_5 <= '\uFFFF')) ) {s = 10;} + + if ( s>=0 ) return s; + break; + case 1 : + int LA35_7 = input.LA(1); + + s = -1; + if ( (LA35_7=='*'||LA35_7=='?') ) {s = 9;} + + else if ( ((LA35_7 >= '\u0000' && LA35_7 <= '\b')||(LA35_7 >= '\u000B' && LA35_7 <= '\f')||(LA35_7 >= '\u000E' && LA35_7 <= '\u001F')||(LA35_7 >= '#' && LA35_7 <= '&')||LA35_7==','||(LA35_7 >= '.' && LA35_7 <= '9')||(LA35_7 >= ';' && LA35_7 <= '>')||(LA35_7 >= '@' && LA35_7 <= 'Z')||(LA35_7 >= '_' && LA35_7 <= 'z')||LA35_7=='|'||(LA35_7 >= '\u007F' && LA35_7 <= '\u2FFF')||(LA35_7 >= '\u3001' && LA35_7 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA35_7=='\\') ) {s = 5;} + + else if ( (LA35_7=='-') ) {s = 6;} + + else if ( (LA35_7=='+') ) {s = 7;} + + else s = 8; + + if ( s>=0 ) return s; + break; + case 2 : + int LA35_0 = input.LA(1); + + s = -1; + if ( (LA35_0=='*') ) {s = 1;} + + else if ( (LA35_0=='?') ) {s = 2;} + + else if ( ((LA35_0 >= '\u0000' && LA35_0 <= '\b')||(LA35_0 >= '\u000B' && LA35_0 <= '\f')||(LA35_0 >= '\u000E' && LA35_0 <= '\u001F')||(LA35_0 >= '#' && LA35_0 <= '&')||LA35_0==','||(LA35_0 >= '.' 
&& LA35_0 <= '9')||(LA35_0 >= ';' && LA35_0 <= '>')||(LA35_0 >= '@' && LA35_0 <= 'Z')||LA35_0=='\\'||(LA35_0 >= '_' && LA35_0 <= 'z')||LA35_0=='|'||(LA35_0 >= '\u007F' && LA35_0 <= '\u2FFF')||(LA35_0 >= '\u3001' && LA35_0 <= '\uFFFF')) ) {s = 3;} + + if ( s>=0 ) return s; + break; + case 3 : + int LA35_1 = input.LA(1); + + s = -1; + if ( ((LA35_1 >= '\u0000' && LA35_1 <= '\b')||(LA35_1 >= '\u000B' && LA35_1 <= '\f')||(LA35_1 >= '\u000E' && LA35_1 <= '\u001F')||(LA35_1 >= '#' && LA35_1 <= '&')||LA35_1==','||(LA35_1 >= '.' && LA35_1 <= '9')||(LA35_1 >= ';' && LA35_1 <= '>')||(LA35_1 >= '@' && LA35_1 <= 'Z')||(LA35_1 >= '_' && LA35_1 <= 'z')||LA35_1=='|'||(LA35_1 >= '\u007F' && LA35_1 <= '\u2FFF')||(LA35_1 >= '\u3001' && LA35_1 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA35_1=='\\') ) {s = 5;} + + else if ( (LA35_1=='-') ) {s = 6;} + + else if ( (LA35_1=='+') ) {s = 7;} + + if ( s>=0 ) return s; + break; + case 4 : + int LA35_2 = input.LA(1); + + s = -1; + if ( ((LA35_2 >= '\u0000' && LA35_2 <= '\b')||(LA35_2 >= '\u000B' && LA35_2 <= '\f')||(LA35_2 >= '\u000E' && LA35_2 <= '\u001F')||(LA35_2 >= '#' && LA35_2 <= '&')||LA35_2==','||(LA35_2 >= '.' && LA35_2 <= '9')||(LA35_2 >= ';' && LA35_2 <= '>')||(LA35_2 >= '@' && LA35_2 <= 'Z')||(LA35_2 >= '_' && LA35_2 <= 'z')||LA35_2=='|'||(LA35_2 >= '\u007F' && LA35_2 <= '\u2FFF')||(LA35_2 >= '\u3001' && LA35_2 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA35_2=='\\') ) {s = 5;} + + else if ( (LA35_2=='-') ) {s = 6;} + + else if ( (LA35_2=='+') ) {s = 7;} + + else if ( (LA35_2=='?') ) {s = 2;} + + if ( s>=0 ) return s; + break; + case 5 : + int LA35_6 = input.LA(1); + + s = -1; + if ( (LA35_6=='*'||LA35_6=='?') ) {s = 9;} + + else if ( ((LA35_6 >= '\u0000' && LA35_6 <= '\b')||(LA35_6 >= '\u000B' && LA35_6 <= '\f')||(LA35_6 >= '\u000E' && LA35_6 <= '\u001F')||(LA35_6 >= '#' && LA35_6 <= '&')||LA35_6==','||(LA35_6 >= '.' && LA35_6 <= '9')||(LA35_6 >= ';' && LA35_6 <= '>')||(LA35_6 >= '@' && LA35_6 <= 'Z')||(LA35_6 >= '_' && LA35_6 <= 'z')||LA35_6=='|'||(LA35_6 >= '\u007F' && LA35_6 <= '\u2FFF')||(LA35_6 >= '\u3001' && LA35_6 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA35_6=='\\') ) {s = 5;} + + else if ( (LA35_6=='-') ) {s = 6;} + + else if ( (LA35_6=='+') ) {s = 7;} + + else s = 8; + + if ( s>=0 ) return s; + break; + case 6 : + int LA35_10 = input.LA(1); + + s = -1; + if ( (LA35_10=='*'||LA35_10=='?') ) {s = 9;} + + else if ( ((LA35_10 >= '\u0000' && LA35_10 <= '\b')||(LA35_10 >= '\u000B' && LA35_10 <= '\f')||(LA35_10 >= '\u000E' && LA35_10 <= '\u001F')||(LA35_10 >= '#' && LA35_10 <= '&')||LA35_10==','||(LA35_10 >= '.' && LA35_10 <= '9')||(LA35_10 >= ';' && LA35_10 <= '>')||(LA35_10 >= '@' && LA35_10 <= 'Z')||(LA35_10 >= '_' && LA35_10 <= 'z')||LA35_10=='|'||(LA35_10 >= '\u007F' && LA35_10 <= '\u2FFF')||(LA35_10 >= '\u3001' && LA35_10 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA35_10=='\\') ) {s = 5;} + + else if ( (LA35_10=='-') ) {s = 6;} + + else if ( (LA35_10=='+') ) {s = 7;} + + else s = 8; + + if ( s>=0 ) return s; + break; + case 7 : + int LA35_4 = input.LA(1); + + s = -1; + if ( (LA35_4=='*'||LA35_4=='?') ) {s = 9;} + + else if ( ((LA35_4 >= '\u0000' && LA35_4 <= '\b')||(LA35_4 >= '\u000B' && LA35_4 <= '\f')||(LA35_4 >= '\u000E' && LA35_4 <= '\u001F')||(LA35_4 >= '#' && LA35_4 <= '&')||LA35_4==','||(LA35_4 >= '.' 
&& LA35_4 <= '9')||(LA35_4 >= ';' && LA35_4 <= '>')||(LA35_4 >= '@' && LA35_4 <= 'Z')||(LA35_4 >= '_' && LA35_4 <= 'z')||LA35_4=='|'||(LA35_4 >= '\u007F' && LA35_4 <= '\u2FFF')||(LA35_4 >= '\u3001' && LA35_4 <= '\uFFFF')) ) {s = 4;} + + else if ( (LA35_4=='\\') ) {s = 5;} + + else if ( (LA35_4=='-') ) {s = 6;} + + else if ( (LA35_4=='+') ) {s = 7;} + + else s = 8; + + if ( s>=0 ) return s; + break; + } + NoViableAltException nvae = + new NoViableAltException(getDescription(), 35, _s, input); + error(nvae); + throw nvae; + } + + } + static final String DFA27_eotS = + "\2\5\1\uffff\2\5\2\uffff\1\5"; + static final String DFA27_eofS = + "\10\uffff"; + static final String DFA27_minS = + "\5\0\2\uffff\1\0"; + static final String DFA27_maxS = + "\5\uffff\2\uffff\1\uffff"; + static final String DFA27_acceptS = + "\5\uffff\1\2\1\1\1\uffff"; + static final String DFA27_specialS = + "\1\3\1\5\1\0\1\2\1\1\2\uffff\1\4}>"; + static final String[] DFA27_transitionS = { + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\4\uffff\1\4\1\1\1"+ + "\3\14\1\1\uffff\4\1\1\uffff\33\1\1\uffff\1\2\2\uffff\34\1\1"+ + "\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\0\7", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "", + "", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1" + }; + + static final short[] DFA27_eot = DFA.unpackEncodedString(DFA27_eotS); + static final short[] DFA27_eof = DFA.unpackEncodedString(DFA27_eofS); + static final char[] DFA27_min = DFA.unpackEncodedStringToUnsignedChars(DFA27_minS); + static final char[] DFA27_max = DFA.unpackEncodedStringToUnsignedChars(DFA27_maxS); + static final short[] DFA27_accept = DFA.unpackEncodedString(DFA27_acceptS); + static final short[] DFA27_special = DFA.unpackEncodedString(DFA27_specialS); + static final short[][] DFA27_transition; + + static { + int numStates = DFA27_transitionS.length; + DFA27_transition = new short[numStates][]; + for (int i=0; i= '\u0000' && LA27_2 <= '\uFFFF')) ) {s = 7;} + + if ( s>=0 ) return s; + break; + case 1 : + int LA27_4 = input.LA(1); + + s = -1; + if ( ((LA27_4 >= '\u0000' && LA27_4 <= '\b')||(LA27_4 >= '\u000B' && LA27_4 <= '\f')||(LA27_4 >= '\u000E' && LA27_4 <= '\u001F')||(LA27_4 >= '#' && LA27_4 <= '&')||LA27_4==','||(LA27_4 >= '.' && LA27_4 <= '9')||(LA27_4 >= ';' && LA27_4 <= '>')||(LA27_4 >= '@' && LA27_4 <= 'Z')||(LA27_4 >= '_' && LA27_4 <= 'z')||LA27_4=='|'||(LA27_4 >= '\u007F' && LA27_4 <= '\u2FFF')||(LA27_4 >= '\u3001' && LA27_4 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA27_4=='\\') ) {s = 2;} + + else if ( (LA27_4=='-') ) {s = 3;} + + else if ( (LA27_4=='+') ) {s = 4;} + + else if ( (LA27_4=='*'||LA27_4=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 2 : + int LA27_3 = input.LA(1); + + s = -1; + if ( ((LA27_3 >= '\u0000' && LA27_3 <= '\b')||(LA27_3 >= '\u000B' && LA27_3 <= '\f')||(LA27_3 >= '\u000E' && LA27_3 <= '\u001F')||(LA27_3 >= '#' && LA27_3 <= '&')||LA27_3==','||(LA27_3 >= '.' 
&& LA27_3 <= '9')||(LA27_3 >= ';' && LA27_3 <= '>')||(LA27_3 >= '@' && LA27_3 <= 'Z')||(LA27_3 >= '_' && LA27_3 <= 'z')||LA27_3=='|'||(LA27_3 >= '\u007F' && LA27_3 <= '\u2FFF')||(LA27_3 >= '\u3001' && LA27_3 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA27_3=='\\') ) {s = 2;} + + else if ( (LA27_3=='-') ) {s = 3;} + + else if ( (LA27_3=='+') ) {s = 4;} + + else if ( (LA27_3=='*'||LA27_3=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 3 : + int LA27_0 = input.LA(1); + + s = -1; + if ( ((LA27_0 >= '\u0000' && LA27_0 <= '\b')||(LA27_0 >= '\u000B' && LA27_0 <= '\f')||(LA27_0 >= '\u000E' && LA27_0 <= '\u001F')||(LA27_0 >= '#' && LA27_0 <= '&')||LA27_0==','||(LA27_0 >= '.' && LA27_0 <= '9')||(LA27_0 >= ';' && LA27_0 <= '>')||(LA27_0 >= '@' && LA27_0 <= 'Z')||(LA27_0 >= '_' && LA27_0 <= 'z')||LA27_0=='|'||(LA27_0 >= '\u007F' && LA27_0 <= '\u2FFF')||(LA27_0 >= '\u3001' && LA27_0 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA27_0=='\\') ) {s = 2;} + + else if ( (LA27_0=='-') ) {s = 3;} + + else if ( (LA27_0=='+') ) {s = 4;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 4 : + int LA27_7 = input.LA(1); + + s = -1; + if ( ((LA27_7 >= '\u0000' && LA27_7 <= '\b')||(LA27_7 >= '\u000B' && LA27_7 <= '\f')||(LA27_7 >= '\u000E' && LA27_7 <= '\u001F')||(LA27_7 >= '#' && LA27_7 <= '&')||LA27_7==','||(LA27_7 >= '.' && LA27_7 <= '9')||(LA27_7 >= ';' && LA27_7 <= '>')||(LA27_7 >= '@' && LA27_7 <= 'Z')||(LA27_7 >= '_' && LA27_7 <= 'z')||LA27_7=='|'||(LA27_7 >= '\u007F' && LA27_7 <= '\u2FFF')||(LA27_7 >= '\u3001' && LA27_7 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA27_7=='\\') ) {s = 2;} + + else if ( (LA27_7=='-') ) {s = 3;} + + else if ( (LA27_7=='+') ) {s = 4;} + + else if ( (LA27_7=='*'||LA27_7=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 5 : + int LA27_1 = input.LA(1); + + s = -1; + if ( ((LA27_1 >= '\u0000' && LA27_1 <= '\b')||(LA27_1 >= '\u000B' && LA27_1 <= '\f')||(LA27_1 >= '\u000E' && LA27_1 <= '\u001F')||(LA27_1 >= '#' && LA27_1 <= '&')||LA27_1==','||(LA27_1 >= '.' 
&& LA27_1 <= '9')||(LA27_1 >= ';' && LA27_1 <= '>')||(LA27_1 >= '@' && LA27_1 <= 'Z')||(LA27_1 >= '_' && LA27_1 <= 'z')||LA27_1=='|'||(LA27_1 >= '\u007F' && LA27_1 <= '\u2FFF')||(LA27_1 >= '\u3001' && LA27_1 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA27_1=='\\') ) {s = 2;} + + else if ( (LA27_1=='-') ) {s = 3;} + + else if ( (LA27_1=='+') ) {s = 4;} + + else if ( (LA27_1=='*'||LA27_1=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + } + NoViableAltException nvae = + new NoViableAltException(getDescription(), 27, _s, input); + error(nvae); + throw nvae; + } + + } + static final String DFA31_eotS = + "\2\5\1\uffff\2\5\2\uffff\1\5"; + static final String DFA31_eofS = + "\10\uffff"; + static final String DFA31_minS = + "\5\0\2\uffff\1\0"; + static final String DFA31_maxS = + "\5\uffff\2\uffff\1\uffff"; + static final String DFA31_acceptS = + "\5\uffff\1\2\1\1\1\uffff"; + static final String DFA31_specialS = + "\1\5\1\3\1\0\1\2\1\4\2\uffff\1\1}>"; + static final String[] DFA31_transitionS = { + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4\1"+ + "\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1\1"+ + "\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\0\7", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1", + "", + "", + "\11\1\2\uffff\2\1\1\uffff\22\1\3\uffff\4\1\3\uffff\1\6\1\4"+ + "\1\1\1\3\14\1\1\uffff\4\1\1\6\33\1\1\uffff\1\2\2\uffff\34\1"+ + "\1\uffff\1\1\2\uffff\u2f81\1\1\uffff\ucfff\1" + }; + + static final short[] DFA31_eot = DFA.unpackEncodedString(DFA31_eotS); + static final short[] DFA31_eof = DFA.unpackEncodedString(DFA31_eofS); + static final char[] DFA31_min = DFA.unpackEncodedStringToUnsignedChars(DFA31_minS); + static final char[] DFA31_max = DFA.unpackEncodedStringToUnsignedChars(DFA31_maxS); + static final short[] DFA31_accept = DFA.unpackEncodedString(DFA31_acceptS); + static final short[] DFA31_special = DFA.unpackEncodedString(DFA31_specialS); + static final short[][] DFA31_transition; + + static { + int numStates = DFA31_transitionS.length; + DFA31_transition = new short[numStates][]; + for (int i=0; i= '\u0000' && LA31_2 <= '\uFFFF')) ) {s = 7;} + + if ( s>=0 ) return s; + break; + case 1 : + int LA31_7 = input.LA(1); + + s = -1; + if ( ((LA31_7 >= '\u0000' && LA31_7 <= '\b')||(LA31_7 >= '\u000B' && LA31_7 <= '\f')||(LA31_7 >= '\u000E' && LA31_7 <= '\u001F')||(LA31_7 >= '#' && LA31_7 <= '&')||LA31_7==','||(LA31_7 >= '.' 
&& LA31_7 <= '9')||(LA31_7 >= ';' && LA31_7 <= '>')||(LA31_7 >= '@' && LA31_7 <= 'Z')||(LA31_7 >= '_' && LA31_7 <= 'z')||LA31_7=='|'||(LA31_7 >= '\u007F' && LA31_7 <= '\u2FFF')||(LA31_7 >= '\u3001' && LA31_7 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA31_7=='\\') ) {s = 2;} + + else if ( (LA31_7=='-') ) {s = 3;} + + else if ( (LA31_7=='+') ) {s = 4;} + + else if ( (LA31_7=='*'||LA31_7=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 2 : + int LA31_3 = input.LA(1); + + s = -1; + if ( ((LA31_3 >= '\u0000' && LA31_3 <= '\b')||(LA31_3 >= '\u000B' && LA31_3 <= '\f')||(LA31_3 >= '\u000E' && LA31_3 <= '\u001F')||(LA31_3 >= '#' && LA31_3 <= '&')||LA31_3==','||(LA31_3 >= '.' && LA31_3 <= '9')||(LA31_3 >= ';' && LA31_3 <= '>')||(LA31_3 >= '@' && LA31_3 <= 'Z')||(LA31_3 >= '_' && LA31_3 <= 'z')||LA31_3=='|'||(LA31_3 >= '\u007F' && LA31_3 <= '\u2FFF')||(LA31_3 >= '\u3001' && LA31_3 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA31_3=='\\') ) {s = 2;} + + else if ( (LA31_3=='-') ) {s = 3;} + + else if ( (LA31_3=='+') ) {s = 4;} + + else if ( (LA31_3=='*'||LA31_3=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 3 : + int LA31_1 = input.LA(1); + + s = -1; + if ( ((LA31_1 >= '\u0000' && LA31_1 <= '\b')||(LA31_1 >= '\u000B' && LA31_1 <= '\f')||(LA31_1 >= '\u000E' && LA31_1 <= '\u001F')||(LA31_1 >= '#' && LA31_1 <= '&')||LA31_1==','||(LA31_1 >= '.' && LA31_1 <= '9')||(LA31_1 >= ';' && LA31_1 <= '>')||(LA31_1 >= '@' && LA31_1 <= 'Z')||(LA31_1 >= '_' && LA31_1 <= 'z')||LA31_1=='|'||(LA31_1 >= '\u007F' && LA31_1 <= '\u2FFF')||(LA31_1 >= '\u3001' && LA31_1 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA31_1=='\\') ) {s = 2;} + + else if ( (LA31_1=='-') ) {s = 3;} + + else if ( (LA31_1=='+') ) {s = 4;} + + else if ( (LA31_1=='*'||LA31_1=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 4 : + int LA31_4 = input.LA(1); + + s = -1; + if ( ((LA31_4 >= '\u0000' && LA31_4 <= '\b')||(LA31_4 >= '\u000B' && LA31_4 <= '\f')||(LA31_4 >= '\u000E' && LA31_4 <= '\u001F')||(LA31_4 >= '#' && LA31_4 <= '&')||LA31_4==','||(LA31_4 >= '.' && LA31_4 <= '9')||(LA31_4 >= ';' && LA31_4 <= '>')||(LA31_4 >= '@' && LA31_4 <= 'Z')||(LA31_4 >= '_' && LA31_4 <= 'z')||LA31_4=='|'||(LA31_4 >= '\u007F' && LA31_4 <= '\u2FFF')||(LA31_4 >= '\u3001' && LA31_4 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA31_4=='\\') ) {s = 2;} + + else if ( (LA31_4=='-') ) {s = 3;} + + else if ( (LA31_4=='+') ) {s = 4;} + + else if ( (LA31_4=='*'||LA31_4=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + case 5 : + int LA31_0 = input.LA(1); + + s = -1; + if ( ((LA31_0 >= '\u0000' && LA31_0 <= '\b')||(LA31_0 >= '\u000B' && LA31_0 <= '\f')||(LA31_0 >= '\u000E' && LA31_0 <= '\u001F')||(LA31_0 >= '#' && LA31_0 <= '&')||LA31_0==','||(LA31_0 >= '.' 
&& LA31_0 <= '9')||(LA31_0 >= ';' && LA31_0 <= '>')||(LA31_0 >= '@' && LA31_0 <= 'Z')||(LA31_0 >= '_' && LA31_0 <= 'z')||LA31_0=='|'||(LA31_0 >= '\u007F' && LA31_0 <= '\u2FFF')||(LA31_0 >= '\u3001' && LA31_0 <= '\uFFFF')) ) {s = 1;} + + else if ( (LA31_0=='\\') ) {s = 2;} + + else if ( (LA31_0=='-') ) {s = 3;} + + else if ( (LA31_0=='+') ) {s = 4;} + + else if ( (LA31_0=='*'||LA31_0=='?') ) {s = 6;} + + else s = 5; + + if ( s>=0 ) return s; + break; + } + NoViableAltException nvae = + new NoViableAltException(getDescription(), 31, _s, input); + error(nvae); + throw nvae; + } + + } + static final String DFA38_eotS = + "\10\uffff\1\32\1\34\4\uffff\1\35\1\uffff\2\42\1\50\1\42\1\53\1\42"+ + "\1\uffff\1\60\1\42\10\uffff\1\67\1\uffff\1\42\1\uffff\3\42\1\uffff"+ + "\1\50\1\53\1\uffff\1\53\3\42\1\uffff\1\60\3\42\3\uffff\1\42\1\50"+ + "\1\100\1\42\2\60\1\42\2\uffff\1\110\1\60\4\42\1\110\1\uffff\1\60"+ + "\1\42\1\114\1\uffff\1\42\1\114"; + static final String DFA38_eofS = + "\117\uffff"; + static final String DFA38_minS = + "\1\0\7\uffff\2\0\4\uffff\1\0\1\uffff\6\0\1\uffff\3\0\4\uffff\2\0"+ + "\1\uffff\1\0\1\uffff\5\0\1\uffff\2\0\1\uffff\4\0\1\uffff\5\0\2\uffff"+ + "\7\0\2\uffff\7\0\1\uffff\3\0\1\uffff\2\0"; + static final String DFA38_maxS = + "\1\uffff\7\uffff\2\uffff\4\uffff\1\uffff\1\uffff\6\uffff\1\uffff"+ + "\3\uffff\4\uffff\2\uffff\1\uffff\1\uffff\1\uffff\5\uffff\1\uffff"+ + "\2\uffff\1\uffff\4\uffff\1\uffff\5\uffff\2\uffff\7\uffff\2\uffff"+ + "\7\uffff\1\uffff\3\uffff\1\uffff\2\uffff"; + static final String DFA38_acceptS = + "\1\uffff\1\1\1\2\1\3\1\4\1\5\1\6\1\7\2\uffff\1\12\1\13\1\14\1\15"+ + "\1\uffff\1\17\6\uffff\1\25\3\uffff\1\10\1\31\1\11\1\16\2\uffff\1"+ + "\33\1\uffff\1\30\5\uffff\1\21\2\uffff\1\22\4\uffff\1\26\5\uffff"+ + "\1\32\1\20\7\uffff\1\32\1\23\7\uffff\1\24\3\uffff\1\27\2\uffff"; + static final String DFA38_specialS = + "\1\32\7\uffff\1\62\1\43\4\uffff\1\4\1\uffff\1\35\1\25\1\31\1\15"+ + "\1\44\1\36\1\uffff\1\41\1\45\1\52\4\uffff\1\46\1\11\1\uffff\1\34"+ + "\1\uffff\1\21\1\53\1\14\1\16\1\51\1\uffff\1\37\1\60\1\uffff\1\10"+ + "\1\2\1\27\1\6\1\uffff\1\54\1\55\1\61\1\26\1\3\2\uffff\1\50\1\1\1"+ + "\17\1\30\1\42\1\23\1\0\2\uffff\1\22\1\40\1\47\1\57\1\33\1\7\1\13"+ + "\1\uffff\1\5\1\20\1\12\1\uffff\1\24\1\56}>"; + static final String[] DFA38_transitionS = { + "\11\30\2\26\2\30\1\26\22\30\1\26\1\7\1\16\3\30\1\22\1\17\1\1"+ + "\1\2\1\10\1\6\1\30\1\7\2\30\12\27\1\5\4\30\1\11\1\30\1\21\14"+ + "\30\1\25\1\23\4\30\1\20\6\30\1\3\1\31\1\4\1\14\2\30\1\21\14"+ + "\30\1\25\1\23\13\30\1\12\1\24\1\13\1\15\u2f81\30\1\26\ucfff"+ + "\30", + "", + "", + "", + "", + "", + "", + "", + "\11\33\2\uffff\2\33\1\uffff\22\33\3\uffff\4\33\4\uffff\17\33"+ + "\1\uffff\4\33\1\uffff\33\33\1\uffff\1\33\2\uffff\34\33\1\uffff"+ + "\1\33\2\uffff\u2f81\33\1\uffff\ucfff\33", + "\11\33\2\uffff\2\33\1\uffff\22\33\3\uffff\4\33\4\uffff\17\33"+ + "\1\uffff\4\33\1\11\33\33\1\uffff\1\33\2\uffff\34\33\1\uffff"+ + "\1\33\2\uffff\u2f81\33\1\uffff\ucfff\33", + "", + "", + "", + "", + "\42\37\1\uffff\7\37\1\40\24\37\1\40\34\37\1\36\uffa3\37", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\17\43\1\41\13\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\16\43\1\47\14\43\1\uffff"+ + "\1\44\2\uffff\17\43\1\47\14\43\1\uffff\1\43\2\uffff\u2f81\43"+ + "\1\uffff\ucfff\43", + 
"\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\3\43\1\51\3\uffff"+ + "\1\33\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1"+ + "\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\22\43\1\52\10\43\1\uffff"+ + "\1\44\2\uffff\23\43\1\52\10\43\1\uffff\1\43\2\uffff\u2f81\43"+ + "\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\54\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\5\43\1\56\11\43\1\55"+ + "\13\43\1\uffff\1\44\2\uffff\6\43\1\56\11\43\1\55\13\43\1\uffff"+ + "\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\63\1\57\1\62\12\61\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\0\64", + "", + "", + "", + "", + "\0\65", + "\42\37\1\66\7\37\1\40\24\37\1\40\34\37\1\36\uffa3\37", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\0\70", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\4\43\1\71\26\43\1\uffff"+ + "\1\44\2\uffff\5\43\1\71\26\43\1\uffff\1\43\2\uffff\u2f81\43"+ + "\1\uffff\ucfff\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\24\43\1\72\6\43\1\uffff"+ + "\1\44\2\uffff\25\43\1\72\6\43\1\uffff\1\43\2\uffff\u2f81\43"+ + "\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\1\43\1\73\31\43\1\uffff"+ + "\1\44\2\uffff\2\43\1\73\31\43\1\uffff\1\43\2\uffff\u2f81\43"+ + "\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + 
"\1\46\1\43\1\45\2\43\12\74\1\uffff\4\43\1\33\33\43\1\uffff\1"+ + "\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\63\1\57\1\62\12\75\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\76\1\uffff\4\43\1\33\33\43\1\uffff\1"+ + "\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\76\1\uffff\4\43\1\33\33\43\1\uffff\1"+ + "\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\42\37\1\66\7\37\1\40\24\37\1\40\34\37\1\36\uffa3\37", + "", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\22\43\1\101\10\43\1"+ + "\uffff\1\44\2\uffff\23\43\1\101\10\43\1\uffff\1\43\2\uffff\u2f81"+ + "\43\1\uffff\ucfff\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\104\2\103\12\102\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\1\105\1\43\12\75\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\104\2\103\12\106\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\107\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\104\2\103\12\111\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\112\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\112\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\111\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + 
"\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\104\2\103\12\43\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\107\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\111\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\113\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\115\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\2\43\12\116\1\uffff\4\43\1\33\33\43\1\uffff"+ + "\1\44\2\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff"+ + "\43", + "\11\43\2\uffff\2\43\1\uffff\22\43\3\uffff\4\43\3\uffff\1\33"+ + "\1\46\1\43\1\45\14\43\1\uffff\4\43\1\33\33\43\1\uffff\1\44\2"+ + "\uffff\34\43\1\uffff\1\43\2\uffff\u2f81\43\1\uffff\ucfff\43" + }; + + static final short[] DFA38_eot = DFA.unpackEncodedString(DFA38_eotS); + static final short[] DFA38_eof = DFA.unpackEncodedString(DFA38_eofS); + static final char[] DFA38_min = DFA.unpackEncodedStringToUnsignedChars(DFA38_minS); + static final char[] DFA38_max = DFA.unpackEncodedStringToUnsignedChars(DFA38_maxS); + static final short[] DFA38_accept = DFA.unpackEncodedString(DFA38_acceptS); + static final short[] DFA38_special = DFA.unpackEncodedString(DFA38_specialS); + static final short[][] DFA38_transition; + + static { + int numStates = DFA38_transitionS.length; + DFA38_transition = new short[numStates][]; + for (int i=0; i= '0' && LA38_62 <= '9')) ) {s = 70;} + + else if ( ((LA38_62 >= '.' && LA38_62 <= '/')) ) {s = 67;} + + else if ( (LA38_62=='-') ) {s = 68;} + + else if ( ((LA38_62 >= '\u0000' && LA38_62 <= '\b')||(LA38_62 >= '\u000B' && LA38_62 <= '\f')||(LA38_62 >= '\u000E' && LA38_62 <= '\u001F')||(LA38_62 >= '#' && LA38_62 <= '&')||LA38_62==','||(LA38_62 >= ';' && LA38_62 <= '>')||(LA38_62 >= '@' && LA38_62 <= 'Z')||(LA38_62 >= '_' && LA38_62 <= 'z')||LA38_62=='|'||(LA38_62 >= '\u007F' && LA38_62 <= '\u2FFF')||(LA38_62 >= '\u3001' && LA38_62 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_62=='\\') ) {s = 36;} + + else if ( (LA38_62=='+') ) {s = 38;} + + else if ( (LA38_62=='*'||LA38_62=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 1 : + int LA38_57 = input.LA(1); + + s = -1; + if ( ((LA38_57 >= '\u0000' && LA38_57 <= '\b')||(LA38_57 >= '\u000B' && LA38_57 <= '\f')||(LA38_57 >= '\u000E' && LA38_57 <= '\u001F')||(LA38_57 >= '#' && LA38_57 <= '&')||LA38_57==','||(LA38_57 >= '.' 
&& LA38_57 <= '9')||(LA38_57 >= ';' && LA38_57 <= '>')||(LA38_57 >= '@' && LA38_57 <= 'Z')||(LA38_57 >= '_' && LA38_57 <= 'z')||LA38_57=='|'||(LA38_57 >= '\u007F' && LA38_57 <= '\u2FFF')||(LA38_57 >= '\u3001' && LA38_57 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_57=='\\') ) {s = 36;} + + else if ( (LA38_57=='-') ) {s = 37;} + + else if ( (LA38_57=='+') ) {s = 38;} + + else if ( (LA38_57=='*'||LA38_57=='?') ) {s = 27;} + + else s = 40; + + if ( s>=0 ) return s; + break; + case 2 : + int LA38_45 = input.LA(1); + + s = -1; + if ( (LA38_45=='T'||LA38_45=='t') ) {s = 58;} + + else if ( ((LA38_45 >= '\u0000' && LA38_45 <= '\b')||(LA38_45 >= '\u000B' && LA38_45 <= '\f')||(LA38_45 >= '\u000E' && LA38_45 <= '\u001F')||(LA38_45 >= '#' && LA38_45 <= '&')||LA38_45==','||(LA38_45 >= '.' && LA38_45 <= '9')||(LA38_45 >= ';' && LA38_45 <= '>')||(LA38_45 >= '@' && LA38_45 <= 'S')||(LA38_45 >= 'U' && LA38_45 <= 'Z')||(LA38_45 >= '_' && LA38_45 <= 's')||(LA38_45 >= 'u' && LA38_45 <= 'z')||LA38_45=='|'||(LA38_45 >= '\u007F' && LA38_45 <= '\u2FFF')||(LA38_45 >= '\u3001' && LA38_45 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_45=='\\') ) {s = 36;} + + else if ( (LA38_45=='-') ) {s = 37;} + + else if ( (LA38_45=='+') ) {s = 38;} + + else if ( (LA38_45=='*'||LA38_45=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 3 : + int LA38_53 = input.LA(1); + + s = -1; + if ( (LA38_53=='\"') ) {s = 54;} + + else if ( (LA38_53=='\\') ) {s = 30;} + + else if ( ((LA38_53 >= '\u0000' && LA38_53 <= '!')||(LA38_53 >= '#' && LA38_53 <= ')')||(LA38_53 >= '+' && LA38_53 <= '>')||(LA38_53 >= '@' && LA38_53 <= '[')||(LA38_53 >= ']' && LA38_53 <= '\uFFFF')) ) {s = 31;} + + else if ( (LA38_53=='*'||LA38_53=='?') ) {s = 32;} + + if ( s>=0 ) return s; + break; + case 4 : + int LA38_14 = input.LA(1); + + s = -1; + if ( (LA38_14=='\\') ) {s = 30;} + + else if ( ((LA38_14 >= '\u0000' && LA38_14 <= '!')||(LA38_14 >= '#' && LA38_14 <= ')')||(LA38_14 >= '+' && LA38_14 <= '>')||(LA38_14 >= '@' && LA38_14 <= '[')||(LA38_14 >= ']' && LA38_14 <= '\uFFFF')) ) {s = 31;} + + else if ( (LA38_14=='*'||LA38_14=='?') ) {s = 32;} + + else s = 29; + + if ( s>=0 ) return s; + break; + case 5 : + int LA38_73 = input.LA(1); + + s = -1; + if ( ((LA38_73 >= '0' && LA38_73 <= '9')) ) {s = 73;} + + else if ( ((LA38_73 >= '\u0000' && LA38_73 <= '\b')||(LA38_73 >= '\u000B' && LA38_73 <= '\f')||(LA38_73 >= '\u000E' && LA38_73 <= '\u001F')||(LA38_73 >= '#' && LA38_73 <= '&')||LA38_73==','||(LA38_73 >= '.' && LA38_73 <= '/')||(LA38_73 >= ';' && LA38_73 <= '>')||(LA38_73 >= '@' && LA38_73 <= 'Z')||(LA38_73 >= '_' && LA38_73 <= 'z')||LA38_73=='|'||(LA38_73 >= '\u007F' && LA38_73 <= '\u2FFF')||(LA38_73 >= '\u3001' && LA38_73 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_73=='\\') ) {s = 36;} + + else if ( (LA38_73=='-') ) {s = 37;} + + else if ( (LA38_73=='+') ) {s = 38;} + + else if ( (LA38_73=='*'||LA38_73=='?') ) {s = 27;} + + else s = 48; + + if ( s>=0 ) return s; + break; + case 6 : + int LA38_47 = input.LA(1); + + s = -1; + if ( ((LA38_47 >= '0' && LA38_47 <= '9')) ) {s = 60;} + + else if ( ((LA38_47 >= '\u0000' && LA38_47 <= '\b')||(LA38_47 >= '\u000B' && LA38_47 <= '\f')||(LA38_47 >= '\u000E' && LA38_47 <= '\u001F')||(LA38_47 >= '#' && LA38_47 <= '&')||LA38_47==','||(LA38_47 >= '.' 
&& LA38_47 <= '/')||(LA38_47 >= ';' && LA38_47 <= '>')||(LA38_47 >= '@' && LA38_47 <= 'Z')||(LA38_47 >= '_' && LA38_47 <= 'z')||LA38_47=='|'||(LA38_47 >= '\u007F' && LA38_47 <= '\u2FFF')||(LA38_47 >= '\u3001' && LA38_47 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_47=='\\') ) {s = 36;} + + else if ( (LA38_47=='-') ) {s = 37;} + + else if ( (LA38_47=='+') ) {s = 38;} + + else if ( (LA38_47=='*'||LA38_47=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 7 : + int LA38_70 = input.LA(1); + + s = -1; + if ( ((LA38_70 >= '.' && LA38_70 <= '/')) ) {s = 67;} + + else if ( (LA38_70=='-') ) {s = 68;} + + else if ( ((LA38_70 >= '\u0000' && LA38_70 <= '\b')||(LA38_70 >= '\u000B' && LA38_70 <= '\f')||(LA38_70 >= '\u000E' && LA38_70 <= '\u001F')||(LA38_70 >= '#' && LA38_70 <= '&')||LA38_70==','||(LA38_70 >= '0' && LA38_70 <= '9')||(LA38_70 >= ';' && LA38_70 <= '>')||(LA38_70 >= '@' && LA38_70 <= 'Z')||(LA38_70 >= '_' && LA38_70 <= 'z')||LA38_70=='|'||(LA38_70 >= '\u007F' && LA38_70 <= '\u2FFF')||(LA38_70 >= '\u3001' && LA38_70 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_70=='\\') ) {s = 36;} + + else if ( (LA38_70=='+') ) {s = 38;} + + else if ( (LA38_70=='*'||LA38_70=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 8 : + int LA38_44 = input.LA(1); + + s = -1; + if ( ((LA38_44 >= '\u0000' && LA38_44 <= '\b')||(LA38_44 >= '\u000B' && LA38_44 <= '\f')||(LA38_44 >= '\u000E' && LA38_44 <= '\u001F')||(LA38_44 >= '#' && LA38_44 <= '&')||LA38_44==','||(LA38_44 >= '.' && LA38_44 <= '9')||(LA38_44 >= ';' && LA38_44 <= '>')||(LA38_44 >= '@' && LA38_44 <= 'Z')||(LA38_44 >= '_' && LA38_44 <= 'z')||LA38_44=='|'||(LA38_44 >= '\u007F' && LA38_44 <= '\u2FFF')||(LA38_44 >= '\u3001' && LA38_44 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_44=='\\') ) {s = 36;} + + else if ( (LA38_44=='-') ) {s = 37;} + + else if ( (LA38_44=='+') ) {s = 38;} + + else if ( (LA38_44=='*'||LA38_44=='?') ) {s = 27;} + + else s = 43; + + if ( s>=0 ) return s; + break; + case 9 : + int LA38_31 = input.LA(1); + + s = -1; + if ( (LA38_31=='\"') ) {s = 54;} + + else if ( (LA38_31=='\\') ) {s = 30;} + + else if ( ((LA38_31 >= '\u0000' && LA38_31 <= '!')||(LA38_31 >= '#' && LA38_31 <= ')')||(LA38_31 >= '+' && LA38_31 <= '>')||(LA38_31 >= '@' && LA38_31 <= '[')||(LA38_31 >= ']' && LA38_31 <= '\uFFFF')) ) {s = 31;} + + else if ( (LA38_31=='*'||LA38_31=='?') ) {s = 32;} + + if ( s>=0 ) return s; + break; + case 10 : + int LA38_75 = input.LA(1); + + s = -1; + if ( ((LA38_75 >= '0' && LA38_75 <= '9')) ) {s = 77;} + + else if ( ((LA38_75 >= '\u0000' && LA38_75 <= '\b')||(LA38_75 >= '\u000B' && LA38_75 <= '\f')||(LA38_75 >= '\u000E' && LA38_75 <= '\u001F')||(LA38_75 >= '#' && LA38_75 <= '&')||LA38_75==','||(LA38_75 >= '.' 
&& LA38_75 <= '/')||(LA38_75 >= ';' && LA38_75 <= '>')||(LA38_75 >= '@' && LA38_75 <= 'Z')||(LA38_75 >= '_' && LA38_75 <= 'z')||LA38_75=='|'||(LA38_75 >= '\u007F' && LA38_75 <= '\u2FFF')||(LA38_75 >= '\u3001' && LA38_75 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_75=='\\') ) {s = 36;} + + else if ( (LA38_75=='-') ) {s = 37;} + + else if ( (LA38_75=='+') ) {s = 38;} + + else if ( (LA38_75=='*'||LA38_75=='?') ) {s = 27;} + + else s = 76; + + if ( s>=0 ) return s; + break; + case 11 : + int LA38_71 = input.LA(1); + + s = -1; + if ( ((LA38_71 >= '0' && LA38_71 <= '9')) ) {s = 71;} + + else if ( ((LA38_71 >= '\u0000' && LA38_71 <= '\b')||(LA38_71 >= '\u000B' && LA38_71 <= '\f')||(LA38_71 >= '\u000E' && LA38_71 <= '\u001F')||(LA38_71 >= '#' && LA38_71 <= '&')||LA38_71==','||(LA38_71 >= '.' && LA38_71 <= '/')||(LA38_71 >= ';' && LA38_71 <= '>')||(LA38_71 >= '@' && LA38_71 <= 'Z')||(LA38_71 >= '_' && LA38_71 <= 'z')||LA38_71=='|'||(LA38_71 >= '\u007F' && LA38_71 <= '\u2FFF')||(LA38_71 >= '\u3001' && LA38_71 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_71=='\\') ) {s = 36;} + + else if ( (LA38_71=='-') ) {s = 37;} + + else if ( (LA38_71=='+') ) {s = 38;} + + else if ( (LA38_71=='*'||LA38_71=='?') ) {s = 27;} + + else s = 72; + + if ( s>=0 ) return s; + break; + case 12 : + int LA38_37 = input.LA(1); + + s = -1; + if ( ((LA38_37 >= '\u0000' && LA38_37 <= '\b')||(LA38_37 >= '\u000B' && LA38_37 <= '\f')||(LA38_37 >= '\u000E' && LA38_37 <= '\u001F')||(LA38_37 >= '#' && LA38_37 <= '&')||LA38_37==','||(LA38_37 >= '.' && LA38_37 <= '9')||(LA38_37 >= ';' && LA38_37 <= '>')||(LA38_37 >= '@' && LA38_37 <= 'Z')||(LA38_37 >= '_' && LA38_37 <= 'z')||LA38_37=='|'||(LA38_37 >= '\u007F' && LA38_37 <= '\u2FFF')||(LA38_37 >= '\u3001' && LA38_37 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_37=='\\') ) {s = 36;} + + else if ( (LA38_37=='-') ) {s = 37;} + + else if ( (LA38_37=='+') ) {s = 38;} + + else if ( (LA38_37=='*'||LA38_37=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 13 : + int LA38_19 = input.LA(1); + + s = -1; + if ( (LA38_19=='R'||LA38_19=='r') ) {s = 42;} + + else if ( ((LA38_19 >= '\u0000' && LA38_19 <= '\b')||(LA38_19 >= '\u000B' && LA38_19 <= '\f')||(LA38_19 >= '\u000E' && LA38_19 <= '\u001F')||(LA38_19 >= '#' && LA38_19 <= '&')||LA38_19==','||(LA38_19 >= '.' && LA38_19 <= '9')||(LA38_19 >= ';' && LA38_19 <= '>')||(LA38_19 >= '@' && LA38_19 <= 'Q')||(LA38_19 >= 'S' && LA38_19 <= 'Z')||(LA38_19 >= '_' && LA38_19 <= 'q')||(LA38_19 >= 's' && LA38_19 <= 'z')||LA38_19=='|'||(LA38_19 >= '\u007F' && LA38_19 <= '\u2FFF')||(LA38_19 >= '\u3001' && LA38_19 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_19=='\\') ) {s = 36;} + + else if ( (LA38_19=='-') ) {s = 37;} + + else if ( (LA38_19=='+') ) {s = 38;} + + else if ( (LA38_19=='*'||LA38_19=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 14 : + int LA38_38 = input.LA(1); + + s = -1; + if ( ((LA38_38 >= '\u0000' && LA38_38 <= '\b')||(LA38_38 >= '\u000B' && LA38_38 <= '\f')||(LA38_38 >= '\u000E' && LA38_38 <= '\u001F')||(LA38_38 >= '#' && LA38_38 <= '&')||LA38_38==','||(LA38_38 >= '.' 
&& LA38_38 <= '9')||(LA38_38 >= ';' && LA38_38 <= '>')||(LA38_38 >= '@' && LA38_38 <= 'Z')||(LA38_38 >= '_' && LA38_38 <= 'z')||LA38_38=='|'||(LA38_38 >= '\u007F' && LA38_38 <= '\u2FFF')||(LA38_38 >= '\u3001' && LA38_38 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_38=='\\') ) {s = 36;} + + else if ( (LA38_38=='-') ) {s = 37;} + + else if ( (LA38_38=='+') ) {s = 38;} + + else if ( (LA38_38=='*'||LA38_38=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 15 : + int LA38_58 = input.LA(1); + + s = -1; + if ( ((LA38_58 >= '\u0000' && LA38_58 <= '\b')||(LA38_58 >= '\u000B' && LA38_58 <= '\f')||(LA38_58 >= '\u000E' && LA38_58 <= '\u001F')||(LA38_58 >= '#' && LA38_58 <= '&')||LA38_58==','||(LA38_58 >= '.' && LA38_58 <= '9')||(LA38_58 >= ';' && LA38_58 <= '>')||(LA38_58 >= '@' && LA38_58 <= 'Z')||(LA38_58 >= '_' && LA38_58 <= 'z')||LA38_58=='|'||(LA38_58 >= '\u007F' && LA38_58 <= '\u2FFF')||(LA38_58 >= '\u3001' && LA38_58 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_58=='\\') ) {s = 36;} + + else if ( (LA38_58=='-') ) {s = 37;} + + else if ( (LA38_58=='+') ) {s = 38;} + + else if ( (LA38_58=='*'||LA38_58=='?') ) {s = 27;} + + else s = 64; + + if ( s>=0 ) return s; + break; + case 16 : + int LA38_74 = input.LA(1); + + s = -1; + if ( ((LA38_74 >= '0' && LA38_74 <= '9')) ) {s = 75;} + + else if ( ((LA38_74 >= '\u0000' && LA38_74 <= '\b')||(LA38_74 >= '\u000B' && LA38_74 <= '\f')||(LA38_74 >= '\u000E' && LA38_74 <= '\u001F')||(LA38_74 >= '#' && LA38_74 <= '&')||LA38_74==','||(LA38_74 >= '.' && LA38_74 <= '/')||(LA38_74 >= ';' && LA38_74 <= '>')||(LA38_74 >= '@' && LA38_74 <= 'Z')||(LA38_74 >= '_' && LA38_74 <= 'z')||LA38_74=='|'||(LA38_74 >= '\u007F' && LA38_74 <= '\u2FFF')||(LA38_74 >= '\u3001' && LA38_74 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_74=='\\') ) {s = 36;} + + else if ( (LA38_74=='-') ) {s = 37;} + + else if ( (LA38_74=='+') ) {s = 38;} + + else if ( (LA38_74=='*'||LA38_74=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 17 : + int LA38_35 = input.LA(1); + + s = -1; + if ( ((LA38_35 >= '\u0000' && LA38_35 <= '\b')||(LA38_35 >= '\u000B' && LA38_35 <= '\f')||(LA38_35 >= '\u000E' && LA38_35 <= '\u001F')||(LA38_35 >= '#' && LA38_35 <= '&')||LA38_35==','||(LA38_35 >= '.' && LA38_35 <= '9')||(LA38_35 >= ';' && LA38_35 <= '>')||(LA38_35 >= '@' && LA38_35 <= 'Z')||(LA38_35 >= '_' && LA38_35 <= 'z')||LA38_35=='|'||(LA38_35 >= '\u007F' && LA38_35 <= '\u2FFF')||(LA38_35 >= '\u3001' && LA38_35 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_35=='\\') ) {s = 36;} + + else if ( (LA38_35=='-') ) {s = 37;} + + else if ( (LA38_35=='+') ) {s = 38;} + + else if ( (LA38_35=='*'||LA38_35=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 18 : + int LA38_65 = input.LA(1); + + s = -1; + if ( ((LA38_65 >= '0' && LA38_65 <= '9')) ) {s = 71;} + + else if ( ((LA38_65 >= '\u0000' && LA38_65 <= '\b')||(LA38_65 >= '\u000B' && LA38_65 <= '\f')||(LA38_65 >= '\u000E' && LA38_65 <= '\u001F')||(LA38_65 >= '#' && LA38_65 <= '&')||LA38_65==','||(LA38_65 >= '.' 
&& LA38_65 <= '/')||(LA38_65 >= ';' && LA38_65 <= '>')||(LA38_65 >= '@' && LA38_65 <= 'Z')||(LA38_65 >= '_' && LA38_65 <= 'z')||LA38_65=='|'||(LA38_65 >= '\u007F' && LA38_65 <= '\u2FFF')||(LA38_65 >= '\u3001' && LA38_65 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_65=='\\') ) {s = 36;} + + else if ( (LA38_65=='-') ) {s = 37;} + + else if ( (LA38_65=='+') ) {s = 38;} + + else if ( (LA38_65=='*'||LA38_65=='?') ) {s = 27;} + + else s = 72; + + if ( s>=0 ) return s; + break; + case 19 : + int LA38_61 = input.LA(1); + + s = -1; + if ( (LA38_61=='.') ) {s = 69;} + + else if ( ((LA38_61 >= '0' && LA38_61 <= '9')) ) {s = 61;} + + else if ( ((LA38_61 >= '\u0000' && LA38_61 <= '\b')||(LA38_61 >= '\u000B' && LA38_61 <= '\f')||(LA38_61 >= '\u000E' && LA38_61 <= '\u001F')||(LA38_61 >= '#' && LA38_61 <= '&')||LA38_61==','||LA38_61=='/'||(LA38_61 >= ';' && LA38_61 <= '>')||(LA38_61 >= '@' && LA38_61 <= 'Z')||(LA38_61 >= '_' && LA38_61 <= 'z')||LA38_61=='|'||(LA38_61 >= '\u007F' && LA38_61 <= '\u2FFF')||(LA38_61 >= '\u3001' && LA38_61 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_61=='\\') ) {s = 36;} + + else if ( (LA38_61=='-') ) {s = 37;} + + else if ( (LA38_61=='+') ) {s = 38;} + + else if ( (LA38_61=='*'||LA38_61=='?') ) {s = 27;} + + else s = 48; + + if ( s>=0 ) return s; + break; + case 20 : + int LA38_77 = input.LA(1); + + s = -1; + if ( ((LA38_77 >= '0' && LA38_77 <= '9')) ) {s = 78;} + + else if ( ((LA38_77 >= '\u0000' && LA38_77 <= '\b')||(LA38_77 >= '\u000B' && LA38_77 <= '\f')||(LA38_77 >= '\u000E' && LA38_77 <= '\u001F')||(LA38_77 >= '#' && LA38_77 <= '&')||LA38_77==','||(LA38_77 >= '.' && LA38_77 <= '/')||(LA38_77 >= ';' && LA38_77 <= '>')||(LA38_77 >= '@' && LA38_77 <= 'Z')||(LA38_77 >= '_' && LA38_77 <= 'z')||LA38_77=='|'||(LA38_77 >= '\u007F' && LA38_77 <= '\u2FFF')||(LA38_77 >= '\u3001' && LA38_77 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_77=='\\') ) {s = 36;} + + else if ( (LA38_77=='-') ) {s = 37;} + + else if ( (LA38_77=='+') ) {s = 38;} + + else if ( (LA38_77=='*'||LA38_77=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 21 : + int LA38_17 = input.LA(1); + + s = -1; + if ( (LA38_17=='N'||LA38_17=='n') ) {s = 39;} + + else if ( ((LA38_17 >= '\u0000' && LA38_17 <= '\b')||(LA38_17 >= '\u000B' && LA38_17 <= '\f')||(LA38_17 >= '\u000E' && LA38_17 <= '\u001F')||(LA38_17 >= '#' && LA38_17 <= '&')||LA38_17==','||(LA38_17 >= '.' && LA38_17 <= '9')||(LA38_17 >= ';' && LA38_17 <= '>')||(LA38_17 >= '@' && LA38_17 <= 'M')||(LA38_17 >= 'O' && LA38_17 <= 'Z')||(LA38_17 >= '_' && LA38_17 <= 'm')||(LA38_17 >= 'o' && LA38_17 <= 'z')||LA38_17=='|'||(LA38_17 >= '\u007F' && LA38_17 <= '\u2FFF')||(LA38_17 >= '\u3001' && LA38_17 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_17=='\\') ) {s = 36;} + + else if ( (LA38_17=='-') ) {s = 37;} + + else if ( (LA38_17=='+') ) {s = 38;} + + else if ( (LA38_17=='*'||LA38_17=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 22 : + int LA38_52 = input.LA(1); + + s = -1; + if ( ((LA38_52 >= '\u0000' && LA38_52 <= '\b')||(LA38_52 >= '\u000B' && LA38_52 <= '\f')||(LA38_52 >= '\u000E' && LA38_52 <= '\u001F')||(LA38_52 >= '#' && LA38_52 <= '&')||LA38_52==','||(LA38_52 >= '.' 
&& LA38_52 <= '9')||(LA38_52 >= ';' && LA38_52 <= '>')||(LA38_52 >= '@' && LA38_52 <= 'Z')||(LA38_52 >= '_' && LA38_52 <= 'z')||LA38_52=='|'||(LA38_52 >= '\u007F' && LA38_52 <= '\u2FFF')||(LA38_52 >= '\u3001' && LA38_52 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_52=='\\') ) {s = 36;} + + else if ( (LA38_52=='-') ) {s = 37;} + + else if ( (LA38_52=='+') ) {s = 38;} + + else if ( (LA38_52=='*'||LA38_52=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 23 : + int LA38_46 = input.LA(1); + + s = -1; + if ( (LA38_46=='A'||LA38_46=='a') ) {s = 59;} + + else if ( ((LA38_46 >= '\u0000' && LA38_46 <= '\b')||(LA38_46 >= '\u000B' && LA38_46 <= '\f')||(LA38_46 >= '\u000E' && LA38_46 <= '\u001F')||(LA38_46 >= '#' && LA38_46 <= '&')||LA38_46==','||(LA38_46 >= '.' && LA38_46 <= '9')||(LA38_46 >= ';' && LA38_46 <= '>')||LA38_46=='@'||(LA38_46 >= 'B' && LA38_46 <= 'Z')||(LA38_46 >= '_' && LA38_46 <= '`')||(LA38_46 >= 'b' && LA38_46 <= 'z')||LA38_46=='|'||(LA38_46 >= '\u007F' && LA38_46 <= '\u2FFF')||(LA38_46 >= '\u3001' && LA38_46 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_46=='\\') ) {s = 36;} + + else if ( (LA38_46=='-') ) {s = 37;} + + else if ( (LA38_46=='+') ) {s = 38;} + + else if ( (LA38_46=='*'||LA38_46=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 24 : + int LA38_59 = input.LA(1); + + s = -1; + if ( (LA38_59=='R'||LA38_59=='r') ) {s = 65;} + + else if ( ((LA38_59 >= '\u0000' && LA38_59 <= '\b')||(LA38_59 >= '\u000B' && LA38_59 <= '\f')||(LA38_59 >= '\u000E' && LA38_59 <= '\u001F')||(LA38_59 >= '#' && LA38_59 <= '&')||LA38_59==','||(LA38_59 >= '.' && LA38_59 <= '9')||(LA38_59 >= ';' && LA38_59 <= '>')||(LA38_59 >= '@' && LA38_59 <= 'Q')||(LA38_59 >= 'S' && LA38_59 <= 'Z')||(LA38_59 >= '_' && LA38_59 <= 'q')||(LA38_59 >= 's' && LA38_59 <= 'z')||LA38_59=='|'||(LA38_59 >= '\u007F' && LA38_59 <= '\u2FFF')||(LA38_59 >= '\u3001' && LA38_59 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_59=='\\') ) {s = 36;} + + else if ( (LA38_59=='-') ) {s = 37;} + + else if ( (LA38_59=='+') ) {s = 38;} + + else if ( (LA38_59=='*'||LA38_59=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 25 : + int LA38_18 = input.LA(1); + + s = -1; + if ( (LA38_18=='&') ) {s = 41;} + + else if ( ((LA38_18 >= '\u0000' && LA38_18 <= '\b')||(LA38_18 >= '\u000B' && LA38_18 <= '\f')||(LA38_18 >= '\u000E' && LA38_18 <= '\u001F')||(LA38_18 >= '#' && LA38_18 <= '%')||LA38_18==','||(LA38_18 >= '.' 
&& LA38_18 <= '9')||(LA38_18 >= ';' && LA38_18 <= '>')||(LA38_18 >= '@' && LA38_18 <= 'Z')||(LA38_18 >= '_' && LA38_18 <= 'z')||LA38_18=='|'||(LA38_18 >= '\u007F' && LA38_18 <= '\u2FFF')||(LA38_18 >= '\u3001' && LA38_18 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_18=='\\') ) {s = 36;} + + else if ( (LA38_18=='-') ) {s = 37;} + + else if ( (LA38_18=='+') ) {s = 38;} + + else if ( (LA38_18=='*'||LA38_18=='?') ) {s = 27;} + + else s = 40; + + if ( s>=0 ) return s; + break; + case 26 : + int LA38_0 = input.LA(1); + + s = -1; + if ( (LA38_0=='(') ) {s = 1;} + + else if ( (LA38_0==')') ) {s = 2;} + + else if ( (LA38_0=='[') ) {s = 3;} + + else if ( (LA38_0==']') ) {s = 4;} + + else if ( (LA38_0==':') ) {s = 5;} + + else if ( (LA38_0=='+') ) {s = 6;} + + else if ( (LA38_0=='!'||LA38_0=='-') ) {s = 7;} + + else if ( (LA38_0=='*') ) {s = 8;} + + else if ( (LA38_0=='?') ) {s = 9;} + + else if ( (LA38_0=='{') ) {s = 10;} + + else if ( (LA38_0=='}') ) {s = 11;} + + else if ( (LA38_0=='^') ) {s = 12;} + + else if ( (LA38_0=='~') ) {s = 13;} + + else if ( (LA38_0=='\"') ) {s = 14;} + + else if ( (LA38_0=='\'') ) {s = 15;} + + else if ( (LA38_0=='T') ) {s = 16;} + + else if ( (LA38_0=='A'||LA38_0=='a') ) {s = 17;} + + else if ( (LA38_0=='&') ) {s = 18;} + + else if ( (LA38_0=='O'||LA38_0=='o') ) {s = 19;} + + else if ( (LA38_0=='|') ) {s = 20;} + + else if ( (LA38_0=='N'||LA38_0=='n') ) {s = 21;} + + else if ( ((LA38_0 >= '\t' && LA38_0 <= '\n')||LA38_0=='\r'||LA38_0==' '||LA38_0=='\u3000') ) {s = 22;} + + else if ( ((LA38_0 >= '0' && LA38_0 <= '9')) ) {s = 23;} + + else if ( ((LA38_0 >= '\u0000' && LA38_0 <= '\b')||(LA38_0 >= '\u000B' && LA38_0 <= '\f')||(LA38_0 >= '\u000E' && LA38_0 <= '\u001F')||(LA38_0 >= '#' && LA38_0 <= '%')||LA38_0==','||(LA38_0 >= '.' && LA38_0 <= '/')||(LA38_0 >= ';' && LA38_0 <= '>')||LA38_0=='@'||(LA38_0 >= 'B' && LA38_0 <= 'M')||(LA38_0 >= 'P' && LA38_0 <= 'S')||(LA38_0 >= 'U' && LA38_0 <= 'Z')||(LA38_0 >= '_' && LA38_0 <= '`')||(LA38_0 >= 'b' && LA38_0 <= 'm')||(LA38_0 >= 'p' && LA38_0 <= 'z')||(LA38_0 >= '\u007F' && LA38_0 <= '\u2FFF')||(LA38_0 >= '\u3001' && LA38_0 <= '\uFFFF')) ) {s = 24;} + + else if ( (LA38_0=='\\') ) {s = 25;} + + if ( s>=0 ) return s; + break; + case 27 : + int LA38_69 = input.LA(1); + + s = -1; + if ( ((LA38_69 >= '0' && LA38_69 <= '9')) ) {s = 73;} + + else if ( ((LA38_69 >= '\u0000' && LA38_69 <= '\b')||(LA38_69 >= '\u000B' && LA38_69 <= '\f')||(LA38_69 >= '\u000E' && LA38_69 <= '\u001F')||(LA38_69 >= '#' && LA38_69 <= '&')||LA38_69==','||(LA38_69 >= '.' && LA38_69 <= '/')||(LA38_69 >= ';' && LA38_69 <= '>')||(LA38_69 >= '@' && LA38_69 <= 'Z')||(LA38_69 >= '_' && LA38_69 <= 'z')||LA38_69=='|'||(LA38_69 >= '\u007F' && LA38_69 <= '\u2FFF')||(LA38_69 >= '\u3001' && LA38_69 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_69=='\\') ) {s = 36;} + + else if ( (LA38_69=='-') ) {s = 37;} + + else if ( (LA38_69=='+') ) {s = 38;} + + else if ( (LA38_69=='*'||LA38_69=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 28 : + int LA38_33 = input.LA(1); + + s = -1; + if ( ((LA38_33 >= '\u0000' && LA38_33 <= '\b')||(LA38_33 >= '\u000B' && LA38_33 <= '\f')||(LA38_33 >= '\u000E' && LA38_33 <= '\u001F')||(LA38_33 >= '#' && LA38_33 <= '&')||LA38_33==','||(LA38_33 >= '.' 
&& LA38_33 <= '9')||(LA38_33 >= ';' && LA38_33 <= '>')||(LA38_33 >= '@' && LA38_33 <= 'Z')||(LA38_33 >= '_' && LA38_33 <= 'z')||LA38_33=='|'||(LA38_33 >= '\u007F' && LA38_33 <= '\u2FFF')||(LA38_33 >= '\u3001' && LA38_33 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_33=='\\') ) {s = 36;} + + else if ( (LA38_33=='-') ) {s = 37;} + + else if ( (LA38_33=='+') ) {s = 38;} + + else if ( (LA38_33=='*'||LA38_33=='?') ) {s = 27;} + + else s = 55; + + if ( s>=0 ) return s; + break; + case 29 : + int LA38_16 = input.LA(1); + + s = -1; + if ( (LA38_16=='O') ) {s = 33;} + + else if ( ((LA38_16 >= '\u0000' && LA38_16 <= '\b')||(LA38_16 >= '\u000B' && LA38_16 <= '\f')||(LA38_16 >= '\u000E' && LA38_16 <= '\u001F')||(LA38_16 >= '#' && LA38_16 <= '&')||LA38_16==','||(LA38_16 >= '.' && LA38_16 <= '9')||(LA38_16 >= ';' && LA38_16 <= '>')||(LA38_16 >= '@' && LA38_16 <= 'N')||(LA38_16 >= 'P' && LA38_16 <= 'Z')||(LA38_16 >= '_' && LA38_16 <= 'z')||LA38_16=='|'||(LA38_16 >= '\u007F' && LA38_16 <= '\u2FFF')||(LA38_16 >= '\u3001' && LA38_16 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_16=='\\') ) {s = 36;} + + else if ( (LA38_16=='-') ) {s = 37;} + + else if ( (LA38_16=='+') ) {s = 38;} + + else if ( (LA38_16=='*'||LA38_16=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 30 : + int LA38_21 = input.LA(1); + + s = -1; + if ( (LA38_21=='O'||LA38_21=='o') ) {s = 45;} + + else if ( (LA38_21=='E'||LA38_21=='e') ) {s = 46;} + + else if ( ((LA38_21 >= '\u0000' && LA38_21 <= '\b')||(LA38_21 >= '\u000B' && LA38_21 <= '\f')||(LA38_21 >= '\u000E' && LA38_21 <= '\u001F')||(LA38_21 >= '#' && LA38_21 <= '&')||LA38_21==','||(LA38_21 >= '.' && LA38_21 <= '9')||(LA38_21 >= ';' && LA38_21 <= '>')||(LA38_21 >= '@' && LA38_21 <= 'D')||(LA38_21 >= 'F' && LA38_21 <= 'N')||(LA38_21 >= 'P' && LA38_21 <= 'Z')||(LA38_21 >= '_' && LA38_21 <= 'd')||(LA38_21 >= 'f' && LA38_21 <= 'n')||(LA38_21 >= 'p' && LA38_21 <= 'z')||LA38_21=='|'||(LA38_21 >= '\u007F' && LA38_21 <= '\u2FFF')||(LA38_21 >= '\u3001' && LA38_21 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_21=='\\') ) {s = 36;} + + else if ( (LA38_21=='-') ) {s = 37;} + + else if ( (LA38_21=='+') ) {s = 38;} + + else if ( (LA38_21=='*'||LA38_21=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 31 : + int LA38_41 = input.LA(1); + + s = -1; + if ( ((LA38_41 >= '\u0000' && LA38_41 <= '\b')||(LA38_41 >= '\u000B' && LA38_41 <= '\f')||(LA38_41 >= '\u000E' && LA38_41 <= '\u001F')||(LA38_41 >= '#' && LA38_41 <= '&')||LA38_41==','||(LA38_41 >= '.' && LA38_41 <= '9')||(LA38_41 >= ';' && LA38_41 <= '>')||(LA38_41 >= '@' && LA38_41 <= 'Z')||(LA38_41 >= '_' && LA38_41 <= 'z')||LA38_41=='|'||(LA38_41 >= '\u007F' && LA38_41 <= '\u2FFF')||(LA38_41 >= '\u3001' && LA38_41 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_41=='\\') ) {s = 36;} + + else if ( (LA38_41=='-') ) {s = 37;} + + else if ( (LA38_41=='+') ) {s = 38;} + + else if ( (LA38_41=='*'||LA38_41=='?') ) {s = 27;} + + else s = 40; + + if ( s>=0 ) return s; + break; + case 32 : + int LA38_66 = input.LA(1); + + s = -1; + if ( ((LA38_66 >= '0' && LA38_66 <= '9')) ) {s = 73;} + + else if ( ((LA38_66 >= '.' 
&& LA38_66 <= '/')) ) {s = 67;} + + else if ( (LA38_66=='-') ) {s = 68;} + + else if ( ((LA38_66 >= '\u0000' && LA38_66 <= '\b')||(LA38_66 >= '\u000B' && LA38_66 <= '\f')||(LA38_66 >= '\u000E' && LA38_66 <= '\u001F')||(LA38_66 >= '#' && LA38_66 <= '&')||LA38_66==','||(LA38_66 >= ';' && LA38_66 <= '>')||(LA38_66 >= '@' && LA38_66 <= 'Z')||(LA38_66 >= '_' && LA38_66 <= 'z')||LA38_66=='|'||(LA38_66 >= '\u007F' && LA38_66 <= '\u2FFF')||(LA38_66 >= '\u3001' && LA38_66 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_66=='\\') ) {s = 36;} + + else if ( (LA38_66=='+') ) {s = 38;} + + else if ( (LA38_66=='*'||LA38_66=='?') ) {s = 27;} + + else s = 48; + + if ( s>=0 ) return s; + break; + case 33 : + int LA38_23 = input.LA(1); + + s = -1; + if ( (LA38_23=='.') ) {s = 47;} + + else if ( ((LA38_23 >= '0' && LA38_23 <= '9')) ) {s = 49;} + + else if ( (LA38_23=='/') ) {s = 50;} + + else if ( (LA38_23=='-') ) {s = 51;} + + else if ( ((LA38_23 >= '\u0000' && LA38_23 <= '\b')||(LA38_23 >= '\u000B' && LA38_23 <= '\f')||(LA38_23 >= '\u000E' && LA38_23 <= '\u001F')||(LA38_23 >= '#' && LA38_23 <= '&')||LA38_23==','||(LA38_23 >= ';' && LA38_23 <= '>')||(LA38_23 >= '@' && LA38_23 <= 'Z')||(LA38_23 >= '_' && LA38_23 <= 'z')||LA38_23=='|'||(LA38_23 >= '\u007F' && LA38_23 <= '\u2FFF')||(LA38_23 >= '\u3001' && LA38_23 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_23=='\\') ) {s = 36;} + + else if ( (LA38_23=='+') ) {s = 38;} + + else if ( (LA38_23=='*'||LA38_23=='?') ) {s = 27;} + + else s = 48; + + if ( s>=0 ) return s; + break; + case 34 : + int LA38_60 = input.LA(1); + + s = -1; + if ( ((LA38_60 >= '0' && LA38_60 <= '9')) ) {s = 66;} + + else if ( ((LA38_60 >= '.' && LA38_60 <= '/')) ) {s = 67;} + + else if ( (LA38_60=='-') ) {s = 68;} + + else if ( ((LA38_60 >= '\u0000' && LA38_60 <= '\b')||(LA38_60 >= '\u000B' && LA38_60 <= '\f')||(LA38_60 >= '\u000E' && LA38_60 <= '\u001F')||(LA38_60 >= '#' && LA38_60 <= '&')||LA38_60==','||(LA38_60 >= ';' && LA38_60 <= '>')||(LA38_60 >= '@' && LA38_60 <= 'Z')||(LA38_60 >= '_' && LA38_60 <= 'z')||LA38_60=='|'||(LA38_60 >= '\u007F' && LA38_60 <= '\u2FFF')||(LA38_60 >= '\u3001' && LA38_60 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_60=='\\') ) {s = 36;} + + else if ( (LA38_60=='+') ) {s = 38;} + + else if ( (LA38_60=='*'||LA38_60=='?') ) {s = 27;} + + else s = 48; + + if ( s>=0 ) return s; + break; + case 35 : + int LA38_9 = input.LA(1); + + s = -1; + if ( (LA38_9=='?') ) {s = 9;} + + else if ( ((LA38_9 >= '\u0000' && LA38_9 <= '\b')||(LA38_9 >= '\u000B' && LA38_9 <= '\f')||(LA38_9 >= '\u000E' && LA38_9 <= '\u001F')||(LA38_9 >= '#' && LA38_9 <= '&')||(LA38_9 >= '+' && LA38_9 <= '9')||(LA38_9 >= ';' && LA38_9 <= '>')||(LA38_9 >= '@' && LA38_9 <= 'Z')||LA38_9=='\\'||(LA38_9 >= '_' && LA38_9 <= 'z')||LA38_9=='|'||(LA38_9 >= '\u007F' && LA38_9 <= '\u2FFF')||(LA38_9 >= '\u3001' && LA38_9 <= '\uFFFF')) ) {s = 27;} + + else s = 28; + + if ( s>=0 ) return s; + break; + case 36 : + int LA38_20 = input.LA(1); + + s = -1; + if ( (LA38_20=='|') ) {s = 44;} + + else if ( ((LA38_20 >= '\u0000' && LA38_20 <= '\b')||(LA38_20 >= '\u000B' && LA38_20 <= '\f')||(LA38_20 >= '\u000E' && LA38_20 <= '\u001F')||(LA38_20 >= '#' && LA38_20 <= '&')||LA38_20==','||(LA38_20 >= '.' 
&& LA38_20 <= '9')||(LA38_20 >= ';' && LA38_20 <= '>')||(LA38_20 >= '@' && LA38_20 <= 'Z')||(LA38_20 >= '_' && LA38_20 <= 'z')||(LA38_20 >= '\u007F' && LA38_20 <= '\u2FFF')||(LA38_20 >= '\u3001' && LA38_20 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_20=='\\') ) {s = 36;} + + else if ( (LA38_20=='-') ) {s = 37;} + + else if ( (LA38_20=='+') ) {s = 38;} + + else if ( (LA38_20=='*'||LA38_20=='?') ) {s = 27;} + + else s = 43; + + if ( s>=0 ) return s; + break; + case 37 : + int LA38_24 = input.LA(1); + + s = -1; + if ( ((LA38_24 >= '\u0000' && LA38_24 <= '\b')||(LA38_24 >= '\u000B' && LA38_24 <= '\f')||(LA38_24 >= '\u000E' && LA38_24 <= '\u001F')||(LA38_24 >= '#' && LA38_24 <= '&')||LA38_24==','||(LA38_24 >= '.' && LA38_24 <= '9')||(LA38_24 >= ';' && LA38_24 <= '>')||(LA38_24 >= '@' && LA38_24 <= 'Z')||(LA38_24 >= '_' && LA38_24 <= 'z')||LA38_24=='|'||(LA38_24 >= '\u007F' && LA38_24 <= '\u2FFF')||(LA38_24 >= '\u3001' && LA38_24 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_24=='\\') ) {s = 36;} + + else if ( (LA38_24=='-') ) {s = 37;} + + else if ( (LA38_24=='+') ) {s = 38;} + + else if ( (LA38_24=='*'||LA38_24=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 38 : + int LA38_30 = input.LA(1); + + s = -1; + if ( ((LA38_30 >= '\u0000' && LA38_30 <= '\uFFFF')) ) {s = 53;} + + if ( s>=0 ) return s; + break; + case 39 : + int LA38_67 = input.LA(1); + + s = -1; + if ( ((LA38_67 >= '0' && LA38_67 <= '9')) ) {s = 74;} + + else if ( ((LA38_67 >= '\u0000' && LA38_67 <= '\b')||(LA38_67 >= '\u000B' && LA38_67 <= '\f')||(LA38_67 >= '\u000E' && LA38_67 <= '\u001F')||(LA38_67 >= '#' && LA38_67 <= '&')||LA38_67==','||(LA38_67 >= '.' && LA38_67 <= '/')||(LA38_67 >= ';' && LA38_67 <= '>')||(LA38_67 >= '@' && LA38_67 <= 'Z')||(LA38_67 >= '_' && LA38_67 <= 'z')||LA38_67=='|'||(LA38_67 >= '\u007F' && LA38_67 <= '\u2FFF')||(LA38_67 >= '\u3001' && LA38_67 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_67=='\\') ) {s = 36;} + + else if ( (LA38_67=='-') ) {s = 37;} + + else if ( (LA38_67=='+') ) {s = 38;} + + else if ( (LA38_67=='*'||LA38_67=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 40 : + int LA38_56 = input.LA(1); + + s = -1; + if ( ((LA38_56 >= '\u0000' && LA38_56 <= '\b')||(LA38_56 >= '\u000B' && LA38_56 <= '\f')||(LA38_56 >= '\u000E' && LA38_56 <= '\u001F')||(LA38_56 >= '#' && LA38_56 <= '&')||LA38_56==','||(LA38_56 >= '.' && LA38_56 <= '9')||(LA38_56 >= ';' && LA38_56 <= '>')||(LA38_56 >= '@' && LA38_56 <= 'Z')||(LA38_56 >= '_' && LA38_56 <= 'z')||LA38_56=='|'||(LA38_56 >= '\u007F' && LA38_56 <= '\u2FFF')||(LA38_56 >= '\u3001' && LA38_56 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_56=='\\') ) {s = 36;} + + else if ( (LA38_56=='-') ) {s = 37;} + + else if ( (LA38_56=='+') ) {s = 38;} + + else if ( (LA38_56=='*'||LA38_56=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 41 : + int LA38_39 = input.LA(1); + + s = -1; + if ( (LA38_39=='D'||LA38_39=='d') ) {s = 57;} + + else if ( ((LA38_39 >= '\u0000' && LA38_39 <= '\b')||(LA38_39 >= '\u000B' && LA38_39 <= '\f')||(LA38_39 >= '\u000E' && LA38_39 <= '\u001F')||(LA38_39 >= '#' && LA38_39 <= '&')||LA38_39==','||(LA38_39 >= '.' 
&& LA38_39 <= '9')||(LA38_39 >= ';' && LA38_39 <= '>')||(LA38_39 >= '@' && LA38_39 <= 'C')||(LA38_39 >= 'E' && LA38_39 <= 'Z')||(LA38_39 >= '_' && LA38_39 <= 'c')||(LA38_39 >= 'e' && LA38_39 <= 'z')||LA38_39=='|'||(LA38_39 >= '\u007F' && LA38_39 <= '\u2FFF')||(LA38_39 >= '\u3001' && LA38_39 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_39=='\\') ) {s = 36;} + + else if ( (LA38_39=='-') ) {s = 37;} + + else if ( (LA38_39=='+') ) {s = 38;} + + else if ( (LA38_39=='*'||LA38_39=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 42 : + int LA38_25 = input.LA(1); + + s = -1; + if ( ((LA38_25 >= '\u0000' && LA38_25 <= '\uFFFF')) ) {s = 52;} + + if ( s>=0 ) return s; + break; + case 43 : + int LA38_36 = input.LA(1); + + s = -1; + if ( ((LA38_36 >= '\u0000' && LA38_36 <= '\uFFFF')) ) {s = 56;} + + if ( s>=0 ) return s; + break; + case 44 : + int LA38_49 = input.LA(1); + + s = -1; + if ( (LA38_49=='.') ) {s = 47;} + + else if ( ((LA38_49 >= '0' && LA38_49 <= '9')) ) {s = 61;} + + else if ( (LA38_49=='/') ) {s = 50;} + + else if ( (LA38_49=='-') ) {s = 51;} + + else if ( ((LA38_49 >= '\u0000' && LA38_49 <= '\b')||(LA38_49 >= '\u000B' && LA38_49 <= '\f')||(LA38_49 >= '\u000E' && LA38_49 <= '\u001F')||(LA38_49 >= '#' && LA38_49 <= '&')||LA38_49==','||(LA38_49 >= ';' && LA38_49 <= '>')||(LA38_49 >= '@' && LA38_49 <= 'Z')||(LA38_49 >= '_' && LA38_49 <= 'z')||LA38_49=='|'||(LA38_49 >= '\u007F' && LA38_49 <= '\u2FFF')||(LA38_49 >= '\u3001' && LA38_49 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_49=='\\') ) {s = 36;} + + else if ( (LA38_49=='+') ) {s = 38;} + + else if ( (LA38_49=='*'||LA38_49=='?') ) {s = 27;} + + else s = 48; + + if ( s>=0 ) return s; + break; + case 45 : + int LA38_50 = input.LA(1); + + s = -1; + if ( ((LA38_50 >= '0' && LA38_50 <= '9')) ) {s = 62;} + + else if ( ((LA38_50 >= '\u0000' && LA38_50 <= '\b')||(LA38_50 >= '\u000B' && LA38_50 <= '\f')||(LA38_50 >= '\u000E' && LA38_50 <= '\u001F')||(LA38_50 >= '#' && LA38_50 <= '&')||LA38_50==','||(LA38_50 >= '.' && LA38_50 <= '/')||(LA38_50 >= ';' && LA38_50 <= '>')||(LA38_50 >= '@' && LA38_50 <= 'Z')||(LA38_50 >= '_' && LA38_50 <= 'z')||LA38_50=='|'||(LA38_50 >= '\u007F' && LA38_50 <= '\u2FFF')||(LA38_50 >= '\u3001' && LA38_50 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_50=='\\') ) {s = 36;} + + else if ( (LA38_50=='-') ) {s = 37;} + + else if ( (LA38_50=='+') ) {s = 38;} + + else if ( (LA38_50=='*'||LA38_50=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 46 : + int LA38_78 = input.LA(1); + + s = -1; + if ( ((LA38_78 >= '\u0000' && LA38_78 <= '\b')||(LA38_78 >= '\u000B' && LA38_78 <= '\f')||(LA38_78 >= '\u000E' && LA38_78 <= '\u001F')||(LA38_78 >= '#' && LA38_78 <= '&')||LA38_78==','||(LA38_78 >= '.' && LA38_78 <= '9')||(LA38_78 >= ';' && LA38_78 <= '>')||(LA38_78 >= '@' && LA38_78 <= 'Z')||(LA38_78 >= '_' && LA38_78 <= 'z')||LA38_78=='|'||(LA38_78 >= '\u007F' && LA38_78 <= '\u2FFF')||(LA38_78 >= '\u3001' && LA38_78 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_78=='\\') ) {s = 36;} + + else if ( (LA38_78=='-') ) {s = 37;} + + else if ( (LA38_78=='+') ) {s = 38;} + + else if ( (LA38_78=='*'||LA38_78=='?') ) {s = 27;} + + else s = 76; + + if ( s>=0 ) return s; + break; + case 47 : + int LA38_68 = input.LA(1); + + s = -1; + if ( ((LA38_68 >= '0' && LA38_68 <= '9')) ) {s = 74;} + + else if ( ((LA38_68 >= '\u0000' && LA38_68 <= '\b')||(LA38_68 >= '\u000B' && LA38_68 <= '\f')||(LA38_68 >= '\u000E' && LA38_68 <= '\u001F')||(LA38_68 >= '#' && LA38_68 <= '&')||LA38_68==','||(LA38_68 >= '.' 
&& LA38_68 <= '/')||(LA38_68 >= ';' && LA38_68 <= '>')||(LA38_68 >= '@' && LA38_68 <= 'Z')||(LA38_68 >= '_' && LA38_68 <= 'z')||LA38_68=='|'||(LA38_68 >= '\u007F' && LA38_68 <= '\u2FFF')||(LA38_68 >= '\u3001' && LA38_68 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_68=='\\') ) {s = 36;} + + else if ( (LA38_68=='-') ) {s = 37;} + + else if ( (LA38_68=='+') ) {s = 38;} + + else if ( (LA38_68=='*'||LA38_68=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 48 : + int LA38_42 = input.LA(1); + + s = -1; + if ( ((LA38_42 >= '\u0000' && LA38_42 <= '\b')||(LA38_42 >= '\u000B' && LA38_42 <= '\f')||(LA38_42 >= '\u000E' && LA38_42 <= '\u001F')||(LA38_42 >= '#' && LA38_42 <= '&')||LA38_42==','||(LA38_42 >= '.' && LA38_42 <= '9')||(LA38_42 >= ';' && LA38_42 <= '>')||(LA38_42 >= '@' && LA38_42 <= 'Z')||(LA38_42 >= '_' && LA38_42 <= 'z')||LA38_42=='|'||(LA38_42 >= '\u007F' && LA38_42 <= '\u2FFF')||(LA38_42 >= '\u3001' && LA38_42 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_42=='\\') ) {s = 36;} + + else if ( (LA38_42=='-') ) {s = 37;} + + else if ( (LA38_42=='+') ) {s = 38;} + + else if ( (LA38_42=='*'||LA38_42=='?') ) {s = 27;} + + else s = 43; + + if ( s>=0 ) return s; + break; + case 49 : + int LA38_51 = input.LA(1); + + s = -1; + if ( ((LA38_51 >= '0' && LA38_51 <= '9')) ) {s = 62;} + + else if ( ((LA38_51 >= '\u0000' && LA38_51 <= '\b')||(LA38_51 >= '\u000B' && LA38_51 <= '\f')||(LA38_51 >= '\u000E' && LA38_51 <= '\u001F')||(LA38_51 >= '#' && LA38_51 <= '&')||LA38_51==','||(LA38_51 >= '.' && LA38_51 <= '/')||(LA38_51 >= ';' && LA38_51 <= '>')||(LA38_51 >= '@' && LA38_51 <= 'Z')||(LA38_51 >= '_' && LA38_51 <= 'z')||LA38_51=='|'||(LA38_51 >= '\u007F' && LA38_51 <= '\u2FFF')||(LA38_51 >= '\u3001' && LA38_51 <= '\uFFFF')) ) {s = 35;} + + else if ( (LA38_51=='\\') ) {s = 36;} + + else if ( (LA38_51=='-') ) {s = 37;} + + else if ( (LA38_51=='+') ) {s = 38;} + + else if ( (LA38_51=='*'||LA38_51=='?') ) {s = 27;} + + else s = 34; + + if ( s>=0 ) return s; + break; + case 50 : + int LA38_8 = input.LA(1); + + s = -1; + if ( ((LA38_8 >= '\u0000' && LA38_8 <= '\b')||(LA38_8 >= '\u000B' && LA38_8 <= '\f')||(LA38_8 >= '\u000E' && LA38_8 <= '\u001F')||(LA38_8 >= '#' && LA38_8 <= '&')||(LA38_8 >= '+' && LA38_8 <= '9')||(LA38_8 >= ';' && LA38_8 <= '>')||(LA38_8 >= '@' && LA38_8 <= 'Z')||LA38_8=='\\'||(LA38_8 >= '_' && LA38_8 <= 'z')||LA38_8=='|'||(LA38_8 >= '\u007F' && LA38_8 <= '\u2FFF')||(LA38_8 >= '\u3001' && LA38_8 <= '\uFFFF')) ) {s = 27;} + + else s = 26; + + if ( s>=0 ) return s; + break; + } + NoViableAltException nvae = + new NoViableAltException(getDescription(), 38, _s, input); + error(nvae); + throw nvae; + } + + } + + +} \ No newline at end of file Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/UnforgivingParser.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/UnforgivingParser.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/UnforgivingParser.java (revision 0) @@ -0,0 +1,33 @@ +package org.apache.lucene.queryparser.flexible.aqp.parser; + +import org.antlr.runtime.BitSet; +import org.antlr.runtime.IntStream; +import org.antlr.runtime.MismatchedTokenException; +import org.antlr.runtime.Parser; +import org.antlr.runtime.RecognitionException; +import org.antlr.runtime.RecognizerSharedState; +import org.antlr.runtime.TokenStream; + +public class UnforgivingParser extends Parser { + + 
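+  // ANTLR's default behavior on a mismatched token is to attempt single-token
+  // insertion/deletion recovery and keep parsing.  This subclass disables that:
+  // recoverFromMismatchedToken() below rethrows immediately, so every syntax
+  // problem is reported back to the caller as a RecognitionException.
+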
public UnforgivingParser(TokenStream input) {
+    super(input);
+  }
+
+  public UnforgivingParser(TokenStream input, RecognizerSharedState state) {
+    super(input, state);
+  }
+
+  @Override
+  protected Object recoverFromMismatchedToken(IntStream input, int ttype, BitSet follow)
+      throws RecognitionException
+  {
+    // do not try to recover; report the mismatch back to the caller instead
+    MismatchedTokenException e = new MismatchedTokenException(ttype, input);
+    throw e;
+  }
+
+}
Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardLuceneParser.java
===================================================================
--- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardLuceneParser.java (revision 0)
+++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardLuceneParser.java (revision 0)
@@ -0,0 +1,60 @@
+package org.apache.lucene.queryparser.flexible.aqp.parser;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser;
+import org.apache.lucene.queryparser.flexible.aqp.AqpSyntaxParserLoadableImpl;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException;
+import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
+
+public class AqpStandardLuceneParser {
+
+  /**
+   * Constructs an {@link AqpQueryParser} that mimics the {@link StandardQueryParser},
+   * loading the grammar by name. The default grammar used is "LuceneGrammar".
+   *
+   * @see AqpQueryParser#AqpQueryParser(String)
+   * @throws QueryNodeParseException
+   */
+
+  public static AqpQueryParser init(String grammarName)
+      throws QueryNodeParseException {
+    return new AqpQueryParser(new AqpStandardQueryConfigHandler(),
+        new AqpSyntaxParserLoadableImpl().initializeGrammar(grammarName),
+        new AqpStandardQueryNodeProcessorPipeline(null),
+        new AqpStandardQueryTreeBuilder());
+  }
+
+  /**
+   * Instantiates {@link StandardLuceneGrammarSyntaxParser} directly; this method
+   * uses a dedicated parser class instead of loading the parser by its grammar
+   * name.
+   *
+   * @return an {@link AqpQueryParser} wired with the standard Lucene grammar
+   * @throws QueryNodeParseException
+   */
+  public static AqpQueryParser init() throws QueryNodeParseException {
+    return new AqpQueryParser(new AqpStandardQueryConfigHandler(),
+        new StandardLuceneGrammarSyntaxParser(),
+        new AqpStandardQueryNodeProcessorPipeline(null),
+        new AqpStandardQueryTreeBuilder());
+
+  }
+
+  /**
+   * Constructs an {@link AqpQueryParser} and sets an
+   * {@link Analyzer} on it. The same as:
+   *
+   *
    + * StandardQueryParser qp = new StandardQueryParser(); + * qp.getQueryConfigHandler().setAnalyzer(analyzer); + *
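+   *
+   * A minimal usage sketch of the AQP entry points (this assumes {@link AqpQueryParser}
+   * follows the flexible queryparser's {@code QueryParserHelper} contract, i.e. exposes
+   * {@code parse(String, String)}; the query string and field name are only examples):
+   *
+   *   AqpQueryParser qp = AqpStandardLuceneParser.init();   // dedicated syntax parser class
+   *   qp.setAnalyzer(analyzer);                             // what init(Analyzer) below does
+   *   Query query = (Query) qp.parse("title:(foo AND bar)", "defaultField");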
+   *
+   * @param analyzer
+   *          the analyzer to be used by this query parser helper
+   * @throws Exception if the parser cannot be initialized
+   */
+  public AqpQueryParser init(Analyzer analyzer) throws Exception {
+    AqpQueryParser p = AqpStandardLuceneParser.init("StandardLuceneGrammar");
+    p.setAnalyzer(analyzer);
+    return p;
+  }
+}
Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpExtendedLuceneParser.java
===================================================================
--- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpExtendedLuceneParser.java (revision 0)
+++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpExtendedLuceneParser.java (revision 0)
@@ -0,0 +1,60 @@
+package org.apache.lucene.queryparser.flexible.aqp.parser;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser;
+import org.apache.lucene.queryparser.flexible.aqp.AqpSyntaxParserLoadableImpl;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException;
+import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
+
+public class AqpExtendedLuceneParser {
+
+  /**
+   * Constructs an {@link AqpQueryParser} that mimics (and extends) the
+   * {@link StandardQueryParser} syntax, loading the grammar by name
+   * ({@link #init(Analyzer)} below uses "ExtendedLuceneGrammar").
+   *
+   * @see AqpQueryParser#AqpQueryParser(String)
+   * @throws QueryNodeParseException
+   */
+
+  public static AqpQueryParser init(String grammarName)
+      throws QueryNodeParseException {
+    return new AqpQueryParser(new AqpStandardQueryConfigHandler(),
+        new AqpSyntaxParserLoadableImpl().initializeGrammar(grammarName),
+        new AqpStandardQueryNodeProcessorPipeline(null),
+        new AqpExtendedLuceneQueryTreeBuilder());
+  }
+
+  /**
+   * Instantiates {@link ExtendedLuceneGrammarSyntaxParser} directly; this method
+   * uses a dedicated parser class instead of loading the parser by its grammar
+   * name.
+   *
+   * @return an {@link AqpQueryParser} wired with the extended Lucene grammar
+   * @throws QueryNodeParseException
+   */
+  public static AqpQueryParser init() throws QueryNodeParseException {
+    return new AqpQueryParser(new AqpStandardQueryConfigHandler(),
+        new ExtendedLuceneGrammarSyntaxParser(),
+        new AqpStandardQueryNodeProcessorPipeline(null),
+        new AqpExtendedLuceneQueryTreeBuilder());
+
+  }
+
+  /**
+   * Constructs an {@link AqpQueryParser} and sets an
+   * {@link Analyzer} on it. The same as:
+   *
+   *
    + * StandardQueryParser qp = new StandardQueryParser(); + * qp.getQueryConfigHandler().setAnalyzer(analyzer); + *
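+   *
+   * Compared to {@link AqpStandardLuceneParser}, this variant swaps in the extended
+   * grammar's syntax parser and a tree builder ({@link AqpExtendedLuceneQueryTreeBuilder})
+   * that additionally registers {@code AqpNearQueryNodeBuilder} for {@code AqpNearQueryNode}
+   * (proximity) nodes. A sketch:
+   *
+   *   AqpQueryParser qp = AqpExtendedLuceneParser.init();   // NEAR-aware tree builder
+   *   qp.setAnalyzer(analyzer);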
+ * + * @param analyzer + * the analyzer to be used by this query parser helper + * @throws Exception + */ + public AqpQueryParser init(Analyzer analyzer) throws Exception { + AqpQueryParser p = AqpStandardLuceneParser.init("ExtendedLuceneGrammar"); + p.setAnalyzer(analyzer); + return p; + } +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpExtendedLuceneQueryTreeBuilder.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpExtendedLuceneQueryTreeBuilder.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpExtendedLuceneQueryTreeBuilder.java (revision 0) @@ -0,0 +1,14 @@ +package org.apache.lucene.queryparser.flexible.aqp.parser; + +import org.apache.lucene.queryparser.flexible.aqp.builders.AqpNearQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpNearQueryNode; + +public class AqpExtendedLuceneQueryTreeBuilder extends + AqpStandardQueryTreeBuilder { + + public void init() { + super.init(); + setBuilder(AqpNearQueryNode.class, new AqpNearQueryNodeBuilder()); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardQueryConfigHandler.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardQueryConfigHandler.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardQueryConfigHandler.java (revision 0) @@ -0,0 +1,126 @@ +package org.apache.lucene.queryparser.flexible.aqp.parser; + +import java.util.HashMap; +import java.util.Map; + +import org.apache.lucene.queryparser.flexible.core.config.ConfigurationKey; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.standard.processors.PhraseSlopQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.aqp.config.AqpFeedback; +import org.apache.lucene.queryparser.flexible.aqp.config.AqpFeedbackEvent; +import org.apache.lucene.queryparser.flexible.aqp.config.AqpFeedbackEventHandler; +import org.apache.lucene.queryparser.flexible.aqp.config.AqpFeedbackImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpFuzzyModifierNode; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpBOOSTProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpDEFOPProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpFUZZYProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpFuzzyModifierProcessor; +import org.apache.lucene.queryparser.flexible.aqp.processors.AqpQNORMALProcessor; + +public class AqpStandardQueryConfigHandler extends StandardQueryConfigHandler { + + final public static class ConfigurationKeys { + + /** + * This attribute is used by {@link AqpQNORMALProcessor} processor and must + * be defined in the {@link QueryConfigHandler}. This attribute tells the + * processor what is the default field when no field is defined in a phrase.
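+   *
+   * A sketch of how the keys in this class are meant to be set (this assumes the
+   * parser exposes the flexible queryparser's {@code getQueryConfigHandler()}, and
+   * that this key holds a field name; "all" is only an example value):
+   *
+   *   QueryConfigHandler config = parser.getQueryConfigHandler();
+   *   config.set(ConfigurationKeys.DEFAULT_FIELD, "all");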
+ * + */ + final public static ConfigurationKey DEFAULT_FIELD = ConfigurationKey + .newInstance(); + + /** + * This attribute is used by {@link PhraseSlopQueryNodeProcessor} processor + * and must be defined in the {@link QueryConfigHandler}. This attribute + * tells the processor what is the default phrase slop when no slop is + * defined in a phrase.
+ * + */ + final public static ConfigurationKey DEFAULT_PROXIMITY = ConfigurationKey + .newInstance(); + + /** + * This attribute is used by {@link AqpDEFOPProcessor} processor and must be + * defined in the {@link QueryConfigHandler}. This attribute tells the + * processor what are the allowed values when user submits them.
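+   *
+   * The value is an int[], most likely the allowed {min, max} bounds; the
+   * constructor below initializes it to { 1, 5 }. A sketch of widening it:
+   *
+   *   parser.getQueryConfigHandler().set(ConfigurationKeys.ALLOWED_PROXIMITY_RANGE,
+   *       new int[] { 1, 10 });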
+ * + */ + final public static ConfigurationKey ALLOWED_PROXIMITY_RANGE = ConfigurationKey + .newInstance(); + + /** + * Value of the default boost, to be used when user specified '^' without a + * value + * + * @see AqpBOOSTProcessor + */ + final public static ConfigurationKey IMPLICIT_BOOST = ConfigurationKey + .newInstance(); + + /** + * This attribute is used to collect feedback messages and suggestions from + * the query parser + * + * @see AqpFeedback#registerEventHandler(AqpFeedbackEventHandler) + * @see AqpFeedback#sendEvent(AqpFeedbackEvent) + */ + final public static ConfigurationKey FEEDBACK = ConfigurationKey + .newInstance(); + + /** + * Default fuzzy value when user specified only 'term~' + * + * @see AqpFUZZYProcessor + * @see AqpFuzzyModifierNode + * @see AqpFuzzyModifierProcessor + */ + final public static ConfigurationKey IMPLICIT_FUZZY = ConfigurationKey + .newInstance(); + + /** + * Allow to use the old-style 0.0-1.0f fuzzy value and let it be handled by + * the SlowFuzzyQuery + * + * @see AqpFuzzyModifierProcessor + * + */ + final public static ConfigurationKey ALLOW_SLOW_FUZZY = ConfigurationKey + .newInstance(); + + /** + * Translation mapping for index names + */ + final public static ConfigurationKey> FIELD_MAPPER = ConfigurationKey + .newInstance(); + + /** + * Translation mapping for index names (after analysis was done) + */ + final public static ConfigurationKey> FIELD_MAPPER_POST_ANALYSIS = ConfigurationKey + .newInstance(); + + } + + public AqpStandardQueryConfigHandler() { + super(); + + // Add listener that will build the FieldConfig attributes. + // None for now + + // Default Values (besides the standard ones) + set(ConfigurationKeys.DEFAULT_FIELD, null); + set(ConfigurationKeys.DEFAULT_PROXIMITY, 5); + set(ConfigurationKeys.ALLOWED_PROXIMITY_RANGE, new int[] { 1, 5 }); + set(ConfigurationKeys.IMPLICIT_BOOST, 1.0f); + set(ConfigurationKeys.IMPLICIT_FUZZY, 0.5f); + set(ConfigurationKeys.FEEDBACK, new AqpFeedbackImpl()); + set(ConfigurationKeys.ALLOW_SLOW_FUZZY, false); + set(ConfigurationKeys.FIELD_MAPPER, new HashMap()); + set(ConfigurationKeys.FIELD_MAPPER_POST_ANALYSIS, + new HashMap()); + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardQueryTreeBuilder.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardQueryTreeBuilder.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/parser/AqpStandardQueryTreeBuilder.java (revision 0) @@ -0,0 +1,96 @@ +package org.apache.lucene.queryparser.flexible.aqp.parser; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.lucene.queryparser.flexible.core.nodes.BooleanQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.FuzzyQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.GroupQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.MatchAllDocsQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.MatchNoDocsQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.SlopQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.TokenizedPhraseQueryNode; +import org.apache.lucene.queryparser.flexible.standard.builders.BooleanQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.BoostQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.DummyQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.FuzzyQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.GroupQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.MatchAllDocsQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.MatchNoDocsQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.ModifierQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.MultiPhraseQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.NumericRangeQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.PhraseQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.PrefixWildcardQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.RegexpQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.SlopQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.StandardBooleanQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.StandardQueryBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.TermRangeQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.builders.WildcardQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.standard.nodes.MultiPhraseQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.NumericQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.NumericRangeQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.PrefixWildcardQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.RegexpQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.StandardBooleanQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.WildcardQueryNode; +import org.apache.lucene.queryparser.flexible.aqp.builders.AqpFieldQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.aqp.builders.AqpQueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.aqp.builders.AqpSlowFuzzyQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.aqp.nodes.SlowFuzzyQueryNode; + +/** + * This query tree builder provides configuration for the standard + * lucene syntax.
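+ * 
+ * In the flexible queryparser flow this builder is the last stage: the processed
+ * query node tree is handed to {@code build()}, which dispatches to the node
+ * builders registered in {@link #init()}. A usage sketch (it is assumed here that
+ * {@code init()} must run before the first {@code build()}; the surrounding AQP
+ * framework may already take care of that):
+ * 
+ *   AqpStandardQueryTreeBuilder builder = new AqpStandardQueryTreeBuilder();
+ *   builder.init();                                    // register the node builders
+ *   Query query = (Query) builder.build(processedQueryNodeTree);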
+ * + * @see AqpStandardLuceneParser + */ +public class AqpStandardQueryTreeBuilder extends AqpQueryTreeBuilder implements + StandardQueryBuilder { + + + public void init() { + setBuilder(GroupQueryNode.class, new GroupQueryNodeBuilder()); + setBuilder(FieldQueryNode.class, new AqpFieldQueryNodeBuilder()); + setBuilder(BooleanQueryNode.class, new BooleanQueryNodeBuilder()); + setBuilder(SlowFuzzyQueryNode.class, new AqpSlowFuzzyQueryNodeBuilder()); + setBuilder(FuzzyQueryNode.class, new FuzzyQueryNodeBuilder()); + setBuilder(NumericQueryNode.class, new DummyQueryNodeBuilder()); + setBuilder(NumericRangeQueryNode.class, new NumericRangeQueryNodeBuilder()); + setBuilder(BoostQueryNode.class, new BoostQueryNodeBuilder()); + setBuilder(ModifierQueryNode.class, new ModifierQueryNodeBuilder()); + setBuilder(WildcardQueryNode.class, new WildcardQueryNodeBuilder()); + setBuilder(TokenizedPhraseQueryNode.class, new PhraseQueryNodeBuilder()); + setBuilder(MatchNoDocsQueryNode.class, new MatchNoDocsQueryNodeBuilder()); + setBuilder(PrefixWildcardQueryNode.class, + new PrefixWildcardQueryNodeBuilder()); + setBuilder(TermRangeQueryNode.class, new TermRangeQueryNodeBuilder()); + setBuilder(RegexpQueryNode.class, new RegexpQueryNodeBuilder()); + setBuilder(SlopQueryNode.class, new SlopQueryNodeBuilder()); + setBuilder(StandardBooleanQueryNode.class, + new StandardBooleanQueryNodeBuilder()); + setBuilder(MultiPhraseQueryNode.class, new MultiPhraseQueryNodeBuilder()); + setBuilder(MatchAllDocsQueryNode.class, new MatchAllDocsQueryNodeBuilder()); + } + + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpDebuggingQueryNodeProcessorPipeline.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpDebuggingQueryNodeProcessorPipeline.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpDebuggingQueryNodeProcessorPipeline.java (revision 0) @@ -0,0 +1,118 @@ +package org.apache.lucene.queryparser.flexible.aqp.util; + +import java.util.Iterator; +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorPipeline; +import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl; + +/** + * This class is used for debugging purposes (eg. from unittests + * or when the query parser was invoked with debuqQuery=true) + * + * The debugging output shows the stage of the AST tree after + * each processing stage completed. Including the changes in + * the internal 'map'. 
+ * + */ +public class AqpDebuggingQueryNodeProcessorPipeline extends + QueryNodeProcessorPipeline { + + EscapeQuerySyntax escaper = new EscapeQuerySyntaxImpl(); + private Class originalProcessorClass; + + public AqpDebuggingQueryNodeProcessorPipeline(QueryConfigHandler queryConfig, + Class originalClass) { + super(queryConfig); + originalProcessorClass = originalClass; + } + + public QueryNode process(QueryNode queryTree) throws QueryNodeException { + String oldVal = null; + String newVal = null; + String oldMap = null; + String newMap = null; + + oldVal = queryTree.toString(); + oldMap = harvestTagMap(queryTree); + newMap = oldMap; + + int i = 1; + System.out.println(" 0. starting"); + System.out.println("--------------------------------------------"); + System.out.println(oldVal); + + Iterator it = this.iterator(); + + QueryNodeProcessor processor; + while (it.hasNext()) { + processor = it.next(); + + System.out.println(" " + i + ". step " + + processor.getClass().toString()); + queryTree = processor.process(queryTree); + newVal = queryTree.toString(); + newMap = harvestTagMap(queryTree); + System.out.println(" Tree changed: " + + (newVal.equals(oldVal) ? "NO" : "YES")); + + if (!newMap.equals(oldMap)) { + System.out.println(" Tags changed: YES"); + System.out.println(" -----------------"); + System.out.println(newMap); + System.out.println(" -----------------"); + } + + System.out.println("--------------------------------------------"); + + System.out.println(newVal.equals(oldVal) ? "" : newVal); + + oldVal = newVal; + oldMap = newMap; + i += 1; + } + + System.out.println(""); + System.out.println("final result:"); + System.out.println("--------------------------------------------"); + System.out.println(queryTree.toString()); + return queryTree; + + } + + private String harvestTagMap(QueryNode queryTree) { + StringBuffer output = new StringBuffer(); + harvestTagMapDesc(queryTree, output, 0); + return output.toString().trim(); + } + + private void harvestTagMapDesc(QueryNode queryTree, StringBuffer output, + int level) { + if (queryTree.getTagMap().size() > 0) { + for (int i = 0; i < level; i++) { + output.append(""); + } + output.append(queryTree.toQueryString(escaper)); + output.append(" : "); + // output.append(queryTree.getClass().getSimpleName()); + // output.append(" : "); + output.append(queryTree.getTagMap()); + output.append("\n"); + } + List children = queryTree.getChildren(); + if (children != null) { + for (QueryNode child : queryTree.getChildren()) { + harvestTagMapDesc(child, output, level + 1); + } + } + } + + public Class getOriginalProcessorClass() { + return originalProcessorClass; + } +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpUtils.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpUtils.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpUtils.java (revision 0) @@ -0,0 +1,57 @@ +package org.apache.lucene.queryparser.flexible.aqp.util; + +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +public class AqpUtils { + + public enum Modifier { + PLUS, MINUS, UNKNOWN; + } + + public static String getFirstChildInputString(QueryNode node) { + return getFirstChildInputString((AqpANTLRNode) node); + } + + public 
static String getFirstChildInputString(AqpANTLRNode node) { + if (node != null && node.getChildren() != null) { + return ((AqpANTLRNode) node.getChildren().get(0)).getTokenInput(); + } + return null; + } + + public static Float getFirstChildInputFloat(AqpANTLRNode node) { + if (node != null && node.getChildren() != null) { + return Float.valueOf(((AqpANTLRNode) node.getChildren().get(0)) + .getTokenInput()); + } + return null; + } + + public static Modifier getFirstChildInputModifier(AqpANTLRNode node) { + if (node != null && node.getChildren() != null) { + String val = ((AqpANTLRNode) node.getChildren().get(0)).getTokenName(); + if (val.equals("PLUS")) { + return Modifier.PLUS; + } else if (val.equals("MINUS")) { + return Modifier.MINUS; + } else { + return Modifier.UNKNOWN; + } + } + return null; + } + + public static void applyFieldToAllChildren(String field, QueryNode node) { + + if (node instanceof FieldQueryNode) { + ((FieldQueryNode) node).setField(field); + } + if (node.getChildren() != null) { + for (QueryNode child : node.getChildren()) { + applyFieldToAllChildren(field, child); + } + } + } +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpQueryParserUtil.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpQueryParserUtil.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpQueryParserUtil.java (revision 0) @@ -0,0 +1,199 @@ +package org.apache.lucene.queryparser.flexible.aqp.util; + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; + +/** + * This class defines utility methods to (help) parse query strings into + * {@link Query} objects. + */ +final public class AqpQueryParserUtil { + + /** + * Parses a query which searches on the fields specified. + *

+ * If x fields are specified, this effectively constructs: + * + *

+   * <code>
+   * (field1:query1) (field2:query2) (field3:query3)...(fieldx:queryx)
+   * </code>
+   * 
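Illustration (not part of this patch): a minimal sketch of calling the overload documented above, assuming an already configured AqpQueryParser instance `qp` whose construction is outside this excerpt; the sketch class and field names are hypothetical.

import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser;
import org.apache.lucene.queryparser.flexible.aqp.util.AqpQueryParserUtil;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.search.Query;

public class AqpMultiFieldParseSketch {
  // 'qp' must already be configured; how it is built is not shown in this excerpt.
  static Query searchTwoFields(AqpQueryParser qp) throws QueryNodeException {
    String[] queries = {"star", "planet"};
    String[] fields = {"title", "abstract"};
    // Effectively builds (title:star) (abstract:planet), each clause added as SHOULD.
    return AqpQueryParserUtil.parse(qp, queries, fields);
  }
}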
+ * + * @param queries + * Queries strings to parse + * @param fields + * Fields to search on + * @param analyzer + * Analyzer to use + * @throws IllegalArgumentException + * if the length of the queries array differs from the length of the + * fields array + */ + public static Query parse(AqpQueryParser qp, String[] queries, String[] fields) + throws QueryNodeException { + if (queries.length != fields.length) + throw new IllegalArgumentException("queries.length != fields.length"); + BooleanQuery bQuery = new BooleanQuery(); + + for (int i = 0; i < fields.length; i++) { + Query q = qp.parse(queries[i], fields[i]); + + if (q != null && // q never null, just being defensive + (!(q instanceof BooleanQuery) || ((BooleanQuery) q).getClauses().length > 0)) { + bQuery.add(q, BooleanClause.Occur.SHOULD); + } + } + return bQuery; + } + + /** + * Parses a query, searching on the fields specified. Use this if you need to + * specify certain fields as required, and others as prohibited. + *

+ * + *

+   * Usage:
+   * <code>
+   * String[] fields = {"filename", "contents", "description"};
+   * BooleanClause.Occur[] flags = {BooleanClause.Occur.SHOULD,
+   *                BooleanClause.Occur.MUST,
+   *                BooleanClause.Occur.MUST_NOT};
+   * MultiFieldQueryParser.parse("query", fields, flags, analyzer);
+   * </code>
+   * 
+ *

+ * The code above would construct a query: + * + *

+   * <code>
+   * (filename:query) +(contents:query) -(description:query)
+   * </code>
+   * 
+ * + * @param query + * Query string to parse + * @param fields + * Fields to search on + * @param flags + * Flags describing the fields + * @param analyzer + * Analyzer to use + * @throws IllegalArgumentException + * if the length of the fields array differs from the length of the + * flags array + */ + public static Query parse(AqpQueryParser qp, String query, String[] fields, + BooleanClause.Occur[] flags, Analyzer analyzer) throws QueryNodeException { + if (fields.length != flags.length) + throw new IllegalArgumentException("fields.length != flags.length"); + BooleanQuery bQuery = new BooleanQuery(); + + for (int i = 0; i < fields.length; i++) { + Query q = qp.parse(query, fields[i]); + + if (q != null && // q never null, just being defensive + (!(q instanceof BooleanQuery) || ((BooleanQuery) q).getClauses().length > 0)) { + bQuery.add(q, flags[i]); + } + } + return bQuery; + } + + /** + * Parses a query, searching on the fields specified. Use this if you need to + * specify certain fields as required, and others as prohibited. + *

+ * + *

+   * Usage:
+   * <code>
+   * String[] queries = {"query1", "query2", "query3"};
+   * String[] fields = {"filename", "contents", "description"};
+   * BooleanClause.Occur[] flags = {BooleanClause.Occur.SHOULD,
+   *                BooleanClause.Occur.MUST,
+   *                BooleanClause.Occur.MUST_NOT};
+   * AqpQueryParserUtil.parse(qp, queries, fields, flags, analyzer);
+   * </code>
+   * 
+ *

+ * The code above would construct a query: + * + *

+   * <code>
+   * (filename:query1) +(contents:query2) -(description:query3)
+   * </code>
+   * 
+ * + * @param queries + * Queries string to parse + * @param fields + * Fields to search on + * @param flags + * Flags describing the fields + * @param analyzer + * Analyzer to use + * @throws IllegalArgumentException + * if the length of the queries, fields, and flags array differ + */ + public static Query parse(AqpQueryParser qp, String[] queries, + String[] fields, BooleanClause.Occur[] flags, Analyzer analyzer) + throws QueryNodeException { + if (!(queries.length == fields.length && queries.length == flags.length)) + throw new IllegalArgumentException( + "queries, fields, and flags array have have different length"); + BooleanQuery bQuery = new BooleanQuery(); + + for (int i = 0; i < fields.length; i++) { + Query q = qp.parse(queries[i], fields[i]); + + if (q != null && // q never null, just being defensive + (!(q instanceof BooleanQuery) || ((BooleanQuery) q).getClauses().length > 0)) { + bQuery.add(q, flags[i]); + } + } + return bQuery; + } + + /** + * Returns a String where those characters that TextParser expects to be + * escaped are escaped by a preceding \. + */ + public static String escape(String s) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < s.length(); i++) { + char c = s.charAt(i); + // These characters are part of the query syntax and must be escaped + if (c == '\\' || c == '+' || c == '-' || c == '!' || c == '(' || c == ')' + || c == ':' || c == '^' || c == '[' || c == ']' || c == '\"' + || c == '{' || c == '}' || c == '~' || c == '*' || c == '?' + || c == '|' || c == '&') { + sb.append('\\'); + } + sb.append(c); + } + return sb.toString(); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpCommonTreeAdaptor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpCommonTreeAdaptor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpCommonTreeAdaptor.java (revision 0) @@ -0,0 +1,46 @@ +package org.apache.lucene.queryparser.flexible.aqp.util; + +import java.util.HashMap; +import java.util.Map; + +import org.antlr.runtime.Token; +import org.antlr.runtime.tree.CommonTreeAdaptor; + + +public class AqpCommonTreeAdaptor extends CommonTreeAdaptor { + + private Map typeToNameMap; + + public AqpCommonTreeAdaptor(String[] tokenNames) { + typeToNameMap = computeTypeToNameMap(tokenNames); + } + + public Object create(Token payload) { + AqpCommonTree tree = new AqpCommonTree(payload); + tree.setTypeToNameMap(this.typeToNameMap); + return tree; + } + + public Token getToken(Object t) { + if (t instanceof AqpCommonTree) { + return ((AqpCommonTree) t).getToken(); + } + return null; // no idea what to do + } + + /* + * translate token types into meaningful names it will be used later on + */ + public Map computeTypeToNameMap(String[] tokenNames) { + Map m = new HashMap(); + if (tokenNames == null) { + return m; + } + for (int ttype = Token.MIN_TOKEN_TYPE; ttype < tokenNames.length; ttype++) { + String name = tokenNames[ttype]; + m.put(new Integer(ttype), name); + } + return m; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpCommonTree.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpCommonTree.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/util/AqpCommonTree.java (revision 0) @@ 
-0,0 +1,126 @@ +package org.apache.lucene.queryparser.flexible.aqp.util; + +import java.util.Map; + +import org.antlr.runtime.CharStream; +import org.antlr.runtime.CommonToken; +import org.antlr.runtime.RecognitionException; +import org.antlr.runtime.Token; +import org.antlr.runtime.tree.CommonErrorNode; +import org.antlr.runtime.tree.CommonTree; +import org.antlr.runtime.tree.Tree; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + + +/** + * A wrapper for the objects produced by ANTLR. These + * will be consumed by the aqp parser. + * + */ +public class AqpCommonTree extends CommonTree { + + protected Map typeToNameMap; + + public AqpCommonTree() { + super(); + } + + public AqpCommonTree(CommonTree node) { + super(node); + } + + public AqpCommonTree(AqpCommonTree node) { + super(node); + } + + public AqpCommonTree(Token t) { + this.token = t; + } + + public Tree dupNode() { + AqpCommonTree r = new AqpCommonTree(this); + r.setTypeToNameMap(this.typeToNameMap); + return r; + } + + /** Return the whole tree converted to QueryNode tree */ + public QueryNode toQueryNodeTree() throws RecognitionException { + if (children == null || children.size() == 0) { + return this.toQueryNode(); + } + + QueryNode buf = toQueryNode(); + for (int i = 0; children != null && i < children.size(); i++) { + Object child = children.get(i); + if (child instanceof CommonErrorNode) { + throw ((CommonErrorNode) child).trappedException; + } + AqpCommonTree t = (AqpCommonTree) child; + buf.add(t.toQueryNodeTree()); + } + return buf; + } + + public QueryNode toQueryNode() { + return (QueryNode) new AqpANTLRNode(this); + } + + public String getTokenLabel() { + if (isNil()) { + return "nil"; + } + if (getType() == Token.INVALID_TOKEN_TYPE) { + return ""; + } + if (token == null) { + return null; + } + return token.getText(); + } + + public String getTokenInput() { + CharStream is = token.getInputStream(); + if (is == null) { + return null; + } + int n = is.size(); + if (getStartIndex() < n && getStopIndex() < n) { + return is.substring(getStartIndex(), getStopIndex()); + } else { + return ""; + } + } + + public int getTokenType() { + return token.getType(); + } + + public String getTypeLabel() { + int t = getTokenType(); + return typeToNameMap.get(t); + } + + public int getStartIndex() { + return ((CommonToken) token).getStartIndex(); + // return startIndex; + } + + public int getStopIndex() { + // return stopIndex; + return ((CommonToken) token).getStopIndex(); + } + + public void setStartIndex(int start) { + ((CommonToken) token).setStartIndex(start); + } + + public void setStopIndex(int end) { + ((CommonToken) token).setStopIndex(end); + } + + public void setTypeToNameMap(Map typeMap) { + typeToNameMap = typeMap; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpGroupQueryOptimizerProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpGroupQueryOptimizerProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpGroupQueryOptimizerProcessor.java (revision 0) @@ -0,0 +1,171 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import 
org.apache.lucene.queryparser.flexible.core.nodes.BooleanQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.GroupQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.standard.nodes.BooleanModifierNode; + +/** + * Optimizes the query by removing the superfluous GroupQuery nodes. We harvest + * all parameters from fuzzy, boost, and modifier nodes and apply those that are + * closest to the actual query. + * + *
+ * Example: + * + *
+ * this (+(-(+(-(that thus))^0.1))^0.3)
+ * 
+ * + * Will be optimized into (when DEFOP = AND): + * + *
+ * +field:this -((+field:that +field:thus)^0.1)
+ * 
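Illustration (not part of this patch): a standalone sketch of the unwrapping idea — walk down single-child GROUP/BOOST/MODIFIER wrappers, remember the first boost and modifier encountered (the keepOutmost policy of ClauseData below), and re-apply them to the innermost clause. The processor itself does this via harvestData(); a caller of the sketch would start with collapse(groupNode.getChildren().get(0), null, null).

import org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode;
import org.apache.lucene.queryparser.flexible.core.nodes.GroupQueryNode;
import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode;
import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;

class GroupCollapseSketch {
  // Descend through single-child wrappers, keeping the first boost/modifier seen,
  // then wrap the innermost clause with exactly those values.
  static QueryNode collapse(QueryNode node, Float boost, ModifierQueryNode.Modifier mod) {
    if ((node instanceof GroupQueryNode || node instanceof BoostQueryNode
        || node instanceof ModifierQueryNode)
        && node.getChildren() != null && node.getChildren().size() == 1) {
      if (boost == null && node instanceof BoostQueryNode) {
        boost = ((BoostQueryNode) node).getValue();
      }
      if (mod == null && node instanceof ModifierQueryNode) {
        mod = ((ModifierQueryNode) node).getModifier();
      }
      return collapse(node.getChildren().get(0), boost, mod);
    }
    QueryNode result = node;
    if (boost != null) {
      result = new BoostQueryNode(result, boost);
    }
    if (mod != null) {
      result = new ModifierQueryNode(result, mod);
    }
    return result;
  }
}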
+ * + * + */ +public class AqpGroupQueryOptimizerProcessor extends QueryNodeProcessorImpl + implements QueryNodeProcessor { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof GroupQueryNode) { + QueryNode immediateChild = node.getChildren().get(0); + ClauseData data = harvestData(immediateChild); + QueryNode changedNode = data.getLastChild(); + + if (data.getLevelsDeep() > 0) { + boolean modified = false; + if (data.getBoost() != null) { + changedNode = new BoostQueryNode(changedNode, data.getBoost()); + modified = true; + } + if (data.getModifier() != null) { + changedNode = new BooleanModifierNode(changedNode, data.getModifier()); + modified = true; + /* + * Why was I doing this? Firstly, it is buggy, the second branch + * always executes - why am i creating new BooleanNode? + * List children = new ArrayList(); if + * (children.size() == 1) { return children.get(0); } else { + * children.add(new ModifierQueryNode(node, data.getModifier())); node + * = new BooleanQueryNode(children); } + */ + } + /* + * if (modified && node.getParent()==null) { List children = + * new ArrayList(); children.add(node); changedNode = new + * BooleanQueryNode(children); } + */ + return changedNode; + } + return immediateChild; + } + return node; + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + + /* + * methods below not used now, but might be added - it tries to compact + * consecutive CLAUSE nodes into one clause, taking only the last + * modifier/tmodifier values + */ + private ClauseData harvestData(QueryNode clauseNode) { + ClauseData data = new ClauseData(); + harvestData(clauseNode, data); + return data; + } + + private void harvestData(QueryNode node, ClauseData data) { + + if (node instanceof ModifierQueryNode) { + data.setModifier(((ModifierQueryNode) node).getModifier()); + } else if (node instanceof BoostQueryNode) { + data.setBoost(((BoostQueryNode) node).getValue()); + } else if (node instanceof GroupQueryNode) { + data.addLevelsDeep(); + } else { + data.setLastChild(node); + return; // break processing + } + if (!node.isLeaf() && node.getChildren().size() == 1) { + harvestData(node.getChildren().get(0), data); + } + + } + + class ClauseData { + private ModifierQueryNode.Modifier modifier; + private Float boost; + private QueryNode lastValidNode; + private boolean keepOutmost = true; // change this to false if you want that + // modifiers that are closer to the clause are applied to ti + private int levelsDeep = 0; + + ClauseData() { + } + + ClauseData(ModifierQueryNode.Modifier mod, Float boost) { + this.modifier = mod; + this.boost = boost; + } + + public ModifierQueryNode.Modifier getModifier() { + return modifier; + } + + public void setModifier(ModifierQueryNode.Modifier modifier) { + if (keepOutmost && this.modifier != null) { + return; + } + this.modifier = modifier; + } + + public Float getBoost() { + return boost; + } + + public void setBoost(Float boost) { + if (keepOutmost && this.boost != null) { + return; + } + this.boost = boost; + } + + public QueryNode getLastChild() { + return lastValidNode; + } + + public void setLastChild(QueryNode lastNonClause) { + this.lastValidNode = lastNonClause; + } + + public int getLevelsDeep() { + return levelsDeep; + } + + public void addLevelsDeep() { + this.levelsDeep++; + } + } + +} Index: 
lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpBOOSTProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpBOOSTProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpBOOSTProcessor.java (revision 0) @@ -0,0 +1,103 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryConfigHandler; + +/** + * Sets the node into the BoostQueryNode, this processor requires that + * {@link AqpTMODIFIERProcessor} ran before. Because we depend on the proper + * tree shape.
+ * + * If the BOOST node contains only one child, we return that child and do nothing.
+ * + * If the BOOST node contains two children, we take the first and check its input, + * e.g. + * + *
+ *                  BOOST
+ *                  /  \
+ *               ^0.1  rest
+ * 
+ * + * We create a new node BoostQueryNode(rest, 0.1) and return that node.
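Illustration (not part of this patch): the child of the BOOST node carries the raw '^...' text, so the extraction boils down to the sketch below; the default value is passed in here instead of being read from ConfigurationKeys.IMPLICIT_BOOST as getBoostValue() does further down.

class BoostTokenSketch {
  // "^"    -> fall back to the configured default (IMPLICIT_BOOST in this patch)
  // "^0.1" -> 0.1f
  static float extractBoost(String tokenInput, float implicitBoostDefault) {
    if (tokenInput.equals("^")) {
      return implicitBoostDefault;
    }
    return Float.parseFloat(tokenInput.replace("^", ""));
  }
}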
+ * + * Presence of the BOOST node child means user specified at least "^" We'll use + * the default from the configuration {@see BoostAttribute} + * + * @see AqpTMODIFIERProcessor + * @see AqpFUZZYProcessor + */ +public class AqpBOOSTProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof AqpANTLRNode + && ((AqpANTLRNode) node).getTokenLabel().equals("BOOST")) { + + if (node.getChildren().size() == 1) { + return node.getChildren().get(0); + } + + Float boost = getBoostValue(node); + if (boost == null) { + return node.getChildren().get(node.getChildren().size() - 1); + } + return new BoostQueryNode(node.getChildren().get( + node.getChildren().size() - 1), boost); + + } + return node; + + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + + private Float getBoostValue(QueryNode boostNode) throws QueryNodeException { + if (boostNode.getChildren() != null) { + + AqpANTLRNode child = ((AqpANTLRNode) boostNode.getChildren().get(0)); + String input = child.getTokenInput(); + float boost; + + if (input.equals("^")) { + QueryConfigHandler queryConfig = getQueryConfigHandler(); + if (queryConfig == null + || !queryConfig + .has(AqpStandardQueryConfigHandler.ConfigurationKeys.IMPLICIT_BOOST)) { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, + "Configuration error: IMPLICIT_BOOST value is missing")); + } + boost = queryConfig + .get(AqpStandardQueryConfigHandler.ConfigurationKeys.IMPLICIT_BOOST); + } else { + boost = Float.valueOf(input.replace("^", "")); + } + + return boost; + + } + return null; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpOptimizationProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpOptimizationProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpOptimizationProcessor.java (revision 0) @@ -0,0 +1,87 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpBooleanQueryNode; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.BooleanQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode.Modifier; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; + +/** + * Optimizes the query tree - on root node - turns +whathever into whatever if + * there is only one child (but only if Modifier is positive, MOD_REQ or + * MOD_NONE) + * + */ +public class AqpOptimizationProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws 
QueryNodeException { + if (node.getParent() == null && node.getChildren() != null + && node.getChildren().size() == 1) { + if (node instanceof BooleanQueryNode) { + QueryNode c = node.getChildren().get(0); + if (c instanceof ModifierQueryNode + && ((ModifierQueryNode) c).getModifier() != Modifier.MOD_NOT) { + return ((ModifierQueryNode) c).getChild(); + } + } + } else if (node instanceof AqpBooleanQueryNode) { + + List children = node.getChildren(); + String thisOp = ((AqpBooleanQueryNode) node).getOperator(); + boolean rewriteSafe = true; + + QueryNode modifier; + QueryNode subClause; + for (int i = 0; i < children.size(); i++) { + modifier = children.get(i); + if (modifier.isLeaf()) { + rewriteSafe = false; + break; + } + subClause = modifier.getChildren().get(0); + if (!(subClause instanceof AqpBooleanQueryNode && ((AqpBooleanQueryNode) subClause) + .getOperator().equals(thisOp))) { + rewriteSafe = false; + break; + } + } + + if (rewriteSafe == true) { + List childrenList = new ArrayList(); + + for (int i = 0; i < children.size(); i++) { + subClause = children.get(i).getChildren().get(0); + for (QueryNode nod : subClause.getChildren()) { + childrenList.add(nod); + } + } + + children.clear(); + node.set(childrenList); + } + } + + return node; + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFUZZYProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFUZZYProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFUZZYProcessor.java (revision 0) @@ -0,0 +1,116 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpFuzzyModifierNode; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryConfigHandler; + +/** + * Sets the node into the BoostQueryNode, this processor requires that + * {@link AqpTMODIFIERProcessor} ran before. Because we depend on the proper + * tree shape. + * + *
+ * + * If the FUZZY node contains only one child, we return that child and do nothing. + * + *
+ * + * If the FUZZY node contains two children, we take the first and check its input, + * e.g. + * + *
+ *                  FUZZY
+ *                  /  \
+ *               ~0.1  rest
+ * 
+ * + * We create a new node {@link AqpFuzzyModifierNode} (rest, 0.1) and + * return that node. + * + *
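Illustration (not part of this patch): the same extraction pattern as the BOOST case, with '~' and the IMPLICIT_FUZZY default; compare getFuzzyValue() further down.

class FuzzyTokenSketch {
  // "~"    -> fall back to the configured default (IMPLICIT_FUZZY in this patch)
  // "~0.8" -> 0.8f
  static float extractFuzzy(String tokenInput, float implicitFuzzyDefault) {
    if (tokenInput.equals("~")) {
      return implicitFuzzyDefault;
    }
    return Float.parseFloat(tokenInput.replace("~", ""));
  }
}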
+ * + * Presence of the BOOST node child means user specified at least "^" We'll use + * the default from the configuration {@link BoostAttribute} + * + * @see AqpTMODIFIERProcessor + * @see AqpFUZZYProcessor + */ +public class AqpFUZZYProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof AqpANTLRNode + && ((AqpANTLRNode) node).getTokenLabel().equals("FUZZY")) { + + if (node.getChildren().size() == 1) { + return node.getChildren().get(0); + } + + Float fuzzy = getFuzzyValue(node); + + if (fuzzy == null) { + return node.getChildren().get(node.getChildren().size() - 1); + } + + return new AqpFuzzyModifierNode(node.getChildren().get( + node.getChildren().size() - 1), fuzzy); + + } + return node; + + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + + private Float getFuzzyValue(QueryNode fuzzyNode) throws QueryNodeException { + if (fuzzyNode.getChildren() != null) { + + AqpANTLRNode child = ((AqpANTLRNode) fuzzyNode.getChildren().get(0)); + String input = child.getTokenInput(); + float fuzzy; + + if (input.equals("~")) { + QueryConfigHandler queryConfig = getQueryConfigHandler(); + if (queryConfig == null + || !queryConfig + .has(AqpStandardQueryConfigHandler.ConfigurationKeys.IMPLICIT_FUZZY)) { + throw new QueryNodeException( + new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, + "Configuration error: " + + AqpStandardQueryConfigHandler.ConfigurationKeys.IMPLICIT_FUZZY + .toString() + " is missing")); + } + fuzzy = queryConfig + .get(AqpStandardQueryConfigHandler.ConfigurationKeys.IMPLICIT_FUZZY); + } else { + fuzzy = Float.valueOf(input.replace("~", "")); + } + + return fuzzy; + + } + return null; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpMULTIVALUEProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpMULTIVALUEProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpMULTIVALUEProcessor.java (revision 0) @@ -0,0 +1,34 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +public class AqpMULTIVALUEProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof AqpANTLRNode + && ((AqpANTLRNode) node).getTokenLabel().equals("MULTIVALUE")) { + return node; + } + return node; + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + +} Index: 
lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFieldMapperProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFieldMapperProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFieldMapperProcessor.java (revision 0) @@ -0,0 +1,73 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; +import java.util.Map; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryConfigHandler.ConfigurationKeys; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.ConfigurationKey; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; + +/** + * Looks at the QueryNode(s) and translates the field name if we have a mapping + * for it. It is used to change the field names on-the-fly + * + * @see AqpFieldMapper + * @see QueryConfigHandler + * + */ +public class AqpFieldMapperProcessor extends QueryNodeProcessorImpl { + + private Map fieldMap; + ConfigurationKey> mapKey = AqpStandardQueryConfigHandler.ConfigurationKeys.FIELD_MAPPER; + + public AqpFieldMapperProcessor() { + // empty constructor + } + + @Override + public QueryNode process(QueryNode queryTree) throws QueryNodeException { + if (getQueryConfigHandler().has(mapKey)) { + fieldMap = getQueryConfigHandler().get(mapKey); + if (this.fieldMap != null) { + return super.process(queryTree); + } + } + + return queryTree; + } + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof FieldQueryNode) { + String field = ((FieldQueryNode) node).getFieldAsString(); + if (fieldMap.containsKey(field)) { + String newField = fieldMap.get(field); + if (newField == null) { + throw new QueryNodeException(new MessageImpl("Server error", + "The configuration error, field " + field + + " is translated to: null")); + } + ((FieldQueryNode) node).setField(newField); + } + } + return node; + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQREGEXProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQREGEXProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQREGEXProcessor.java (revision 0) @@ -0,0 +1,45 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; 
+import org.apache.lucene.queryparser.flexible.standard.nodes.RegexpQueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpAdsabsRegexQueryNode; + +/** + * Converts QREGEX node into @{link {@link RegexpQueryNode}. The field value is + * the @{link DefaultFieldAttribute} specified in the configuration. + * + *
+ * + * If the user specified a field, it will be set by the @{link + * AqpFIELDProcessor} Therefore the {@link AqpQNORMALProcessor} should run + * before it. + * + * + * @see QueryConfigHandler + * @see DefaultFieldAttribute + * + */ +public class AqpQREGEXProcessor extends AqpQProcessor { + + public boolean nodeIsWanted(AqpANTLRNode node) { + if (node.getTokenLabel().equals("QREGEX")) { + return true; + } + return false; + } + + public QueryNode createQNode(AqpANTLRNode node) throws QueryNodeException { + String field = getDefaultFieldName(); + + AqpANTLRNode subChild = (AqpANTLRNode) node.getChildren().get(0); + String input = subChild.getTokenInput(); + return new AqpAdsabsRegexQueryNode(field, input.substring(1, + input.length() - 1), subChild.getTokenStart(), subChild.getTokenEnd()); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpOPERATORProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpOPERATORProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpOPERATORProcessor.java (revision 0) @@ -0,0 +1,137 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.AndQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.OrQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpAndQueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpDefopQueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpNearQueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpNotQueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpOrQueryNode; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryConfigHandler; + +/** + * This processor operates on OPERATOR nodes, these are the nodes with labels + * AND, OR, NOT, DEFOP, NEAR and possibly others.
+ * I have decided to create a special QueryNode for each type of operator, + * because the default implementations ({@link AndQueryNode}, {@link OrQueryNode}) + * were confusing. + * + *
+ *   AND
+ *   	- creates {@link AqpAndQueryNode}
+ *   OR
+ *   	- creates {@link AqpOrQueryNode}
+ *   NOT
+ *   	- creates {@link AqpNotQueryNode}
+ *   NEAR
+ *   	- creates {@link AqpNearQueryNode}
+ *   WITH
+ *   	- not implemented yet
+ *   PARAGRAPH
+ *   	- not implemented yet
+ * 
+ * + * This processor should run after {@link AqpDEFOPProcessor}, and also after + * {@link AqpMODIFIERProcessor} because modifiers should have precedence over + * operators. Like in the query: "this OR +that" + * + */ +public class AqpOPERATORProcessor extends AqpQProcessorPost { + + @Override + public boolean nodeIsWanted(AqpANTLRNode node) { + if (node.getTokenName().equals("OPERATOR")) { + return true; + } + return false; + } + + @Override + public QueryNode createQNode(AqpANTLRNode node) throws QueryNodeException { + + String label = node.getTokenLabel(); + + if (label.equals("DEFOP")) { + return new AqpDefopQueryNode(node.getChildren(), getDefaultOperator()); + } else if (label.equals("AND")) { + return new AqpAndQueryNode(node.getChildren()); + } else if (label.equals("OR")) { + return new AqpOrQueryNode(node.getChildren()); + } else if (label.equals("NOT")) { + return new AqpNotQueryNode(node.getChildren()); + } else if (label.toUpperCase().contains("NEAR")) { + if (label.length() <= 4) { + return new AqpNearQueryNode(node.getChildren(), + getDefaultProximityValue()); + } else { + int distance = Integer.parseInt(label.substring(4)); + if (isProximityValueAllowed(distance)) { + return new AqpNearQueryNode(node.getChildren(), distance); + } else { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.INVALID_SYNTAX, + "Proximity is only allowed in a range: " + getRange()[0] + "-" + getRange()[1])); + } + } + + } else { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.INVALID_SYNTAX, "Unknown operator " + label)); + } + + } + + private Integer getDefaultProximityValue() throws QueryNodeException { + QueryConfigHandler queryConfig = getQueryConfigHandler(); + if (queryConfig == null + || !queryConfig + .has(AqpStandardQueryConfigHandler.ConfigurationKeys.DEFAULT_PROXIMITY)) { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, + "Configuration error: " + "DefaultProximity value is missing")); + } + return queryConfig + .get(AqpStandardQueryConfigHandler.ConfigurationKeys.DEFAULT_PROXIMITY); + } + + private int[] getRange() { + QueryConfigHandler queryConfig = getQueryConfigHandler(); + return queryConfig + .get(AqpStandardQueryConfigHandler.ConfigurationKeys.ALLOWED_PROXIMITY_RANGE); + + } + + private boolean isProximityValueAllowed(int userValue) { + int[] range = getRange(); + if (userValue >= range[0] && userValue <= range[1]) { + return true; + } + return false; + } + + protected StandardQueryConfigHandler.Operator getDefaultOperator() + throws QueryNodeException { + QueryConfigHandler queryConfig = getQueryConfigHandler(); + + if (queryConfig != null) { + if (queryConfig + .has(StandardQueryConfigHandler.ConfigurationKeys.DEFAULT_OPERATOR)) { + return queryConfig + .get(StandardQueryConfigHandler.ConfigurationKeys.DEFAULT_OPERATOR); + } + } + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, + "Configuration error: " + + StandardQueryConfigHandler.ConfigurationKeys.class.toString() + + " is missing")); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpDEFOPProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpDEFOPProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpDEFOPProcessor.java (revision 0) @@ -0,0 +1,149 @@ 
+package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * Finds the {@link AqpANTLRNode} with tokenLabel + * + *
+ * DEFOP
+ * 
+ * + * and sets their {@code tokenInput} to the name of the default operator. + * + * If there is only one child, the child is returned and we remove the operator. + * This happens mainly for simple queries such as + * + *
+ * field:value
+ * 
+ * + * But also for queries which are themselves clauses, like: + * + *
+ * +(this that)
+ * 
+ * + * which produces: + * + *
+ *            DEFOP
+ *              |
+ *           MODIFIER
+ *            /   \
+ *               TMODIFIER
+ *                  |
+ *                CLAUSE
+ *                  | 
+ *                DEFOP
+ *                /   \
+ *          MODIFIER MODIFIER   
+ *             |        |
+ * 
+ * + * + * @see DefaultOperatorAttribute + * @see AqpQueryParser#setDefaultOperator(org.apache.lucene.queryparser.flexible.standard.config.DefaultOperatorAttribute.Operator) + * + */ +public class AqpDEFOPProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + + if (node instanceof AqpANTLRNode + && ((AqpANTLRNode) node).getTokenLabel().equals("DEFOP")) { + + // only one child, we'll simplify the tree + if (node.getChildren().size() == 1) { + QueryNode child = node.getChildren().get(0); + while (child instanceof AqpANTLRNode + && ((AqpANTLRNode) child).getTokenLabel().equals("DEFOP")) { + child = child.getChildren().get(0); + } + return child; + } + + /* + * This used to be here to modify the AST tree, but is now handled by the + * AqpOptimizationProcessor + * + * AqpANTLRNode thisNode = (AqpANTLRNode) node; + * StandardQueryConfigHandler.Operator op = getDefaultOperator(); + * + * // Turn the DEFOP into the default operator + * thisNode.setTokenLabel(op.name()); + * + * List children = node.getChildren(); if (children != null && + * children.size() == 1) { AqpANTLRNode child = (AqpANTLRNode) + * children.get(0); if (child.getTokenName().equals("OPERATOR") || + * child.getTokenLabel().equals("CLAUSE") || + * child.getTokenLabel().equals("ATOM")) { return child; } } else if + * (children != null && children.size() > 1) { // several childeren + * (=clauses) below the operator // we check if we can put them together, + * ie // (this) AND (that) --> this AND that + * + * String thisOp = thisNode.getTokenLabel(); String last = ((AqpANTLRNode) + * children.get(0)).getTokenLabel(); boolean rewriteSafe = true; + * + * for (int i = 1; i < children.size(); i++) { AqpANTLRNode t = + * (AqpANTLRNode) children.get(i); String tt = t.getTokenLabel(); if + * (!(tt.equals(last) && t.getTokenLabel().equals(thisOp))) { rewriteSafe + * = false; break; } } + * + * if (rewriteSafe == true) { QueryNode firstChild = children.get(0); + * List childrenList = firstChild.getChildren(); + * + * for (int i = 1; i < children.size(); i++) { QueryNode otherChild = + * children.get(i); for (QueryNode nod : otherChild.getChildren()) { + * childrenList.add(nod); } } + * + * children.clear(); thisNode.set(childrenList); } } + */ + } + return node; + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + + protected StandardQueryConfigHandler.Operator getDefaultOperator() + throws QueryNodeException { + QueryConfigHandler queryConfig = getQueryConfigHandler(); + + if (queryConfig != null) { + + if (queryConfig + .has(StandardQueryConfigHandler.ConfigurationKeys.DEFAULT_OPERATOR)) { + return queryConfig + .get(StandardQueryConfigHandler.ConfigurationKeys.DEFAULT_OPERATOR); + } + } + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, + "Configuration error: " + + StandardQueryConfigHandler.ConfigurationKeys.class.toString() + + " is missing")); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpMODIFIERProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpMODIFIERProcessor.java (revision 0) +++ 
lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpMODIFIERProcessor.java (revision 0) @@ -0,0 +1,92 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode.Modifier; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * Creates a {@link ModifierQueryNode} from the MODIFIER node last child + * + * If MODIFIER node contains only one child, we return that child and do + * nothing.
+ * + * If the MODIFIER node contains two children, we take the first and check its input, + * e.g. + * + *
+ *               MODIFIER
+ *                  /  \
+ *                 +  rest
+ * 
+ * + * We create a new node ModifierQueryNode(rest, Modifier) and return that node.
+ * + * This processor should run before {@link AqpOPERATORProcessor} to ensure that + * local modifiers take precedence over the boolean operators. For example: + * + *
+ * title:(+a -b c)
+ * 
+ * + * Should produce (when OR is the default operator): + * + *
+ *  +title:a -title:b title:c
+ * 
+ * + * and when AND is the default operator: + * + *
+ *  +title:a -title:b +title:c
+ * 
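Illustration (not part of this patch): a compact sketch of the '+'/'-' mapping described above; unlike createQNode() below, unknown modifiers are passed through instead of raising an exception.

import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode;
import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;

class ModifierMappingSketch {
  // "PLUS"  ('+') -> MOD_REQ (required clause)
  // "MINUS" ('-') -> MOD_NOT (prohibited clause)
  static QueryNode wrap(String tokenName, QueryNode value) {
    if (tokenName.equals("PLUS")) {
      return new ModifierQueryNode(value, ModifierQueryNode.Modifier.MOD_REQ);
    }
    if (tokenName.equals("MINUS")) {
      return new ModifierQueryNode(value, ModifierQueryNode.Modifier.MOD_NOT);
    }
    return value; // unknown modifier: leave the node untouched in this sketch
  }
}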
+ * + * @see Modifier + * @see AqpBooleanQueryNode + */ +public class AqpMODIFIERProcessor extends AqpQProcessor { + + public boolean nodeIsWanted(AqpANTLRNode node) { + if (node.getTokenLabel().equals("MODIFIER")) { + return true; + } + return false; + } + + public AqpANTLRNode getModifierNode(QueryNode node) { + return ((AqpANTLRNode) node.getChildren().get(0)); + } + + public QueryNode getValueNode(QueryNode node) { + return node.getChildren().get(node.getChildren().size() - 1); + } + + public QueryNode createQNode(AqpANTLRNode node) throws QueryNodeException { + + if (node.getChildren().size() == 1) { + return node.getChildren().get(0); + } + + AqpANTLRNode modifierNode = getModifierNode(node); + String modifier = modifierNode.getTokenName(); + + QueryNode childNode = getValueNode(node); + + if (modifier.equals("PLUS")) { + return new ModifierQueryNode(childNode, + ModifierQueryNode.Modifier.MOD_REQ); + } else if (modifier.equals("MINUS")) { + return new ModifierQueryNode(childNode, + ModifierQueryNode.Modifier.MOD_NOT); + } else { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, + "Unknown modifier: " + modifier + "\n" + node.toString())); + } + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQProcessor.java (revision 0) @@ -0,0 +1,188 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.antlr.runtime.CharStream; +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.standard.parser.ParseException; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.aqp.config.AqpFeedback; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpCommonTree; + +/** + * A generic class that is used by other query processors, eg. 
+ * {@link AqpQNORMALProcessor} + * + * + * + */ +public class AqpQProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + public String defaultField = null; + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof AqpANTLRNode) { + AqpANTLRNode n = (AqpANTLRNode) node; + if (nodeIsWanted(n)) { + return createQNode(n); + } + } + return node; + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + + public boolean nodeIsWanted(AqpANTLRNode node) { + throw new UnsupportedOperationException(); + } + + public QueryNode createQNode(AqpANTLRNode node) throws QueryNodeException { + throw new UnsupportedOperationException(); + } + + public String getDefaultFieldName() throws QueryNodeException { + QueryConfigHandler queryConfig = getQueryConfigHandler(); + + String defaultField = null; + if (queryConfig != null) { + + if (queryConfig + .has(AqpStandardQueryConfigHandler.ConfigurationKeys.DEFAULT_FIELD)) { + defaultField = queryConfig + .get(AqpStandardQueryConfigHandler.ConfigurationKeys.DEFAULT_FIELD); + } + return defaultField; + } + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.LUCENE_QUERY_CONVERSION_ERROR, + "Configuration error: DEFAULT_FIELD is missing")); + } + + public AqpFeedback getFeedbackAttr() throws QueryNodeException { + QueryConfigHandler config = getQueryConfigHandler(); + if (config.has(AqpStandardQueryConfigHandler.ConfigurationKeys.FEEDBACK)) { + return config + .get(AqpStandardQueryConfigHandler.ConfigurationKeys.FEEDBACK); + } else { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.NODE_ACTION_NOT_SUPPORTED, + "Configuration error, missing AqpFeedback.class in the config!")); + } + } + + public QueryNode getTerminalNode(QueryNode node) { + while (!node.isLeaf()) { + return getTerminalNode(node.getChildren().get( + node.getChildren().size() - 1)); + } + return node; + } + + public static OriginalInput getOriginalInput(AqpANTLRNode node) + throws ParseException { + + CharStream inputStream = getInputStream(node); + if (inputStream == null) { + throw new ParseException(new MessageImpl( + "The supplied tree doesn't have input stream")); + } + int[] startIndex = new int[] { inputStream.size() }; + getTheLowestIndex(startIndex, node); + if (startIndex[0] == -1) { + throw new ParseException(new MessageImpl( + "We cannot find where the input starts")); + } + int[] lastIndex = new int[] { startIndex[0] }; + getTheHighestIndex(lastIndex, node); + if (lastIndex[0] < startIndex[0]) { + throw new ParseException(new MessageImpl( + "We cannot find where the input ends")); + } + // if (lastIndex[0]+1 < inputStream.size()) { + // lastIndex[0] += 1; + // } + return new OriginalInput( + inputStream.substring(startIndex[0], lastIndex[0]), startIndex[0], + lastIndex[0]); + } + + public static CharStream getInputStream(QueryNode node) { + if (node.isLeaf()) { + if (node instanceof AqpANTLRNode) { + if (((AqpANTLRNode) node).getTree().getToken().getInputStream() != null) { + return ((AqpANTLRNode) node).getTree().getToken().getInputStream(); + } + } + } else { + for (QueryNode child : node.getChildren()) { + CharStream r = getInputStream(child); + if (r != null) { + return r; + } + } + } + return null; + } + + private static void getTheHighestIndex(int[] i, QueryNode node) { + if (!node.isLeaf()) 
{ + for (QueryNode n : node.getChildren()) { + getTheHighestIndex(i, n); + } + } + if (node instanceof AqpANTLRNode) { + int si = ((AqpANTLRNode) node).getInputTokenEnd(); + if (i[0] < si) { + i[0] = si; + } + } + } + + private static void getTheLowestIndex(int[] i, QueryNode node) { + if (!node.isLeaf()) { + for (QueryNode n : node.getChildren()) { + getTheLowestIndex(i, n); + } + } + if (node instanceof AqpANTLRNode) { + int si = ((AqpANTLRNode) node).getInputTokenStart(); + if (si > -1 && si < i[0]) { + i[0] = si; + } + } + } + + public static class OriginalInput { + public String value; + public int start; + public int end; + + public OriginalInput(String value, int startIndex, int endIndex) { + this.value = value; + this.start = startIndex; + this.end = endIndex; + } + + public String toString() { + return String.format("%s [%d:%d]", this.value, this.start, this.end); + } + } +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpATOMProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpATOMProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpATOMProcessor.java (revision 0) @@ -0,0 +1,50 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +public class AqpATOMProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + + if (node instanceof AqpANTLRNode + && ((AqpANTLRNode) node).getTokenLabel().equals("ATOM")) { + + AqpANTLRNode atomNode = (AqpANTLRNode) node; + + List children = node.getChildren(); + QueryNode modifierNode = children.get(0); // MODIFIER + node = children.get(1); // NUCLEUS + + if (modifierNode.getChildren() != null) { + String modifier = ((AqpANTLRNode) modifierNode.getChildren().get(0)) + .getTokenName(); + node = new ModifierQueryNode(node, + modifier.equals("PLUS") ? 
ModifierQueryNode.Modifier.MOD_REQ + : ModifierQueryNode.Modifier.MOD_NOT); + } + return node; + } + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpTreeRewriteProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpTreeRewriteProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpTreeRewriteProcessor.java (revision 0) @@ -0,0 +1,76 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/* + * Processor which massages the AST tree before other processors work + * with it. + * - replaces chain of OPERATORs with the lowest ie. (AND (AND (AND..))) + * becomes (AND ...); this happens only if the OPERATOR has one + * single child of type: OPERATOR, ATOM, CLAUSE + * + * Useful mostly for the DEFOP operator as our ANTLR grammars + * usually group same clauses under one operator + * + * - + */ +public class AqpTreeRewriteProcessor extends QueryNodeProcessorImpl { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + + if (node instanceof AqpANTLRNode && node.getChildren() != null) { + List children = node.getChildren(); + AqpANTLRNode n = (AqpANTLRNode) node; + AqpANTLRNode child; + + // turn (AND (AND (CLAUSE...))) into (AND (CLAUSE...)) + // also (AND (ATOM ....)) into (ATOM...) + if (n.getTokenName().equals("OPERATOR") && children.size() == 1) { + child = (AqpANTLRNode) children.get(0); + if (child.getTokenName().equals("OPERATOR") + || child.getTokenName().equals("ATOM") + || child.getTokenName().equals("CLAUSE")) { + return child; + } + } + + /* + * if (node.getParent()==null && children.size() > 1) { // it is a root + * mode String last = ((AqpANTLRNode)children.get(0)).getTokenLabel(); + * boolean rewriteSafe = true; // check all children nodes are of the same + * type for (int i=1;i childrenList = + * firstChild.getChildren(); + * + * for (int i=1;i setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQANYTHINGProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQANYTHINGProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQANYTHINGProcessor.java (revision 0) @@ -0,0 +1,26 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.MatchAllDocsQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * Converts QANYTHING node into @{link {@link MatchAllDocsQueryNode}. 
The field + * value is the @{link DefaultFieldAttribute} specified in the configuration. + * + */ +public class AqpQANYTHINGProcessor extends AqpQProcessor { + + public boolean nodeIsWanted(AqpANTLRNode node) { + if (node.getTokenLabel().equals("QANYTHING")) { + return true; + } + return false; + } + + public QueryNode createQNode(AqpANTLRNode node) throws QueryNodeException { + return new MatchAllDocsQueryNode(); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQPHRASEProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQPHRASEProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQPHRASEProcessor.java (revision 0) @@ -0,0 +1,46 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QuotedFieldQueryNode; +import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * Converts QPHRASE node into @{link {@link QuotedFieldQueryNode}. The field + * value is the @{link DefaultFieldAttribute} specified in the configuration. + * + *
+ * + * If the user specified a field, it will be set by the @{link + * AqpFIELDProcessor} Therefore the {@link AqpQPHRASEProcessor} should run + * before it. + * + * + * @see QueryConfigHandler + * @see DefaultFieldAttribute + * + */ +public class AqpQPHRASEProcessor extends AqpQProcessor { + + public boolean nodeIsWanted(AqpANTLRNode node) { + if (node.getTokenLabel().equals("QPHRASE")) { + return true; + } + return false; + } + + public QueryNode createQNode(AqpANTLRNode node) throws QueryNodeException { + String field = getDefaultFieldName(); + + AqpANTLRNode subChild = (AqpANTLRNode) node.getChildren().get(0); + + return new QuotedFieldQueryNode(field, + EscapeQuerySyntaxImpl.discardEscapeChar(subChild.getTokenInput() + .substring(1, subChild.getTokenInput().length() - 1)), + subChild.getTokenStart() + 1, subChild.getTokenEnd() - 1); + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQRANGEINProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQRANGEINProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQRANGEINProcessor.java (revision 0) @@ -0,0 +1,101 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode; +import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * Converts QRANGEIN node into @{link {@link ParametricQueryNode}. The field + * value is the @{link DefaultFieldAttribute} specified in the configuration. + * + * Because QRANGE nodes have this shape: + * + *
+ *                      QRANGE
+ *                      /    \
+ *                 QNORMAL  QPHRASE
+ *                   /          \
+ *                 some       "phrase"
+ * 
+ * + * It is important to queue {@link AqpQRANGEEXProcessor} and + * {@link AqpQRANGEINProcessor} before the processors that + * transform QNORMAL, QPHRASE and the other Q* nodes.
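+ *
+ * A minimal illustration (the bracketed range syntax is only an assumed example
+ * of what the grammar accepts): a query such as year:[2000 TO 2005] yields a
+ * QRANGEIN node whose two children carry the bounds; {@link #createQNode} then
+ * builds the equivalent of
+ *
+ *   FieldQueryNode lower = new FieldQueryNode(defaultField, "2000", start, end);
+ *   FieldQueryNode upper = new FieldQueryNode(defaultField, "2005", start, end);
+ *   new TermRangeQueryNode(lower, upper, true, true);  // both ends inclusive
+ *
+ * where defaultField is the configured default field (a concrete field supplied
+ * by the user is applied later by the FIELD processor).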
+ * + * If the user specified a field, it will be set by the @{link + * AqpFIELDProcessor} Therefore this processor should queue before @{link + * AqpFIELDProcessor}. + * + * + * @see QueryConfigHandler + * @see DefaultFieldAttribute + * @see AqpQRANGEEXProcessor + * @see AqpQueryNodeProcessorPipeline + * + */ +public class AqpQRANGEINProcessor extends AqpQProcessor { + + protected boolean lowerInclusive = true; + protected boolean upperInclusive = true; + + public boolean nodeIsWanted(AqpANTLRNode node) { + if (node.getTokenLabel().equals("QRANGEIN")) { + return true; + } + return false; + } + + public QueryNode createQNode(AqpANTLRNode node) throws QueryNodeException { + String field = getDefaultFieldName(); + + AqpANTLRNode lowerNode = (AqpANTLRNode) node.getChildren().get(0); + AqpANTLRNode upperNode = (AqpANTLRNode) node.getChildren().get(1); + + NodeData lower = getTokenInput(lowerNode); + NodeData upper = getTokenInput(upperNode); + + FieldQueryNode lowerBound = new FieldQueryNode(field, + EscapeQuerySyntaxImpl.discardEscapeChar(lower.value), lower.start, + lower.end); + FieldQueryNode upperBound = new FieldQueryNode(field, + EscapeQuerySyntaxImpl.discardEscapeChar(upper.value), upper.start, + upper.end); + + return new TermRangeQueryNode(lowerBound, upperBound, lowerInclusive, + upperInclusive); + + } + + public NodeData getTokenInput(AqpANTLRNode node) { + String label = node.getTokenLabel(); + AqpANTLRNode subNode = (AqpANTLRNode) node.getChildren().get(0); + + if (label.equals("QANYTHING")) { + return new NodeData("*", subNode.getTokenStart(), subNode.getTokenEnd()); + } else if (label.contains("PHRASE")) { + return new NodeData(subNode.getTokenInput().substring(1, + subNode.getTokenInput().length() - 1), subNode.getTokenStart() + 1, + subNode.getTokenEnd() - 1); + } else { + return new NodeData(subNode.getTokenInput(), subNode.getTokenStart(), + subNode.getTokenEnd()); + } + } + + class NodeData { + public String value; + public int start; + public int end; + + NodeData(String value, int start, int end) { + this.value = value; + this.start = start; + this.end = end; + } + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFuzzyModifierProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFuzzyModifierProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFuzzyModifierProcessor.java (revision 0) @@ -0,0 +1,108 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.FuzzyQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QuotedFieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.SlopQueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import 
org.apache.lucene.queryparser.flexible.standard.nodes.WildcardQueryNode; +import org.apache.lucene.queryparser.flexible.aqp.builders.AqpSlowFuzzyQueryNodeBuilder; +import org.apache.lucene.queryparser.flexible.aqp.config.AqpFeedback; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpFuzzyModifierNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.SlowFuzzyQueryNode; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryConfigHandler; + +/** + * Rewrites the query node which is below the {@link AqpFuzzyModifierNode} + * + * The actions are:
+ * + * + * {@link FieldQueryNode}
+ * - query is turned into {@link FuzzyQueryNode} or {@link SlowFuzzyQueryNode}
+ * depending on the configuration of {@link AqpStandardQueryConfigHandler}.ConfigurationKeys.ALLOW_SLOW_FUZZY + * - an invalid syntax error is raised unless 0.0 < fuzzy < 1.0
+ * + * {@link WildcardQueryNode}, {@link QuotedFieldQueryNode}
+ * - the node is wrapped with a {@link SlopQueryNode} (the fuzzy value is truncated to an integer slop)
+ * + * + * {@link QuotedFieldQueryNode}
+ * - wrapped with {@link SlopQueryNode}
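+ *
+ * An illustrative sketch (the trailing ~ syntax shown here follows the usual
+ * Lucene convention and is only assumed for the example): for a plain term the
+ * float value is kept, for phrases and wildcards it is truncated to an int slop:
+ *
+ *   term~0.8      -> new FuzzyQueryNode(field, "term", 0.8f, begin, end)
+ *   "a phrase"~2  -> new SlopQueryNode(quotedNode, 2)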
+ * + * + */ +public class AqpFuzzyModifierProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof AqpFuzzyModifierNode) { + QueryNode child = ((AqpFuzzyModifierNode) node).getChild(); + Float fuzzy = ((AqpFuzzyModifierNode) node).getFuzzyValue(); + + QueryConfigHandler config = getQueryConfigHandler(); + + if (child instanceof QuotedFieldQueryNode + || child instanceof WildcardQueryNode) { + + if (fuzzy.intValue() < fuzzy) { + + if (config + .has(AqpStandardQueryConfigHandler.ConfigurationKeys.FEEDBACK)) { + AqpFeedback feedback = config + .get(AqpStandardQueryConfigHandler.ConfigurationKeys.FEEDBACK); + feedback.sendEvent(feedback.createEvent(AqpFeedback.TYPE.WARN, + this.getClass(), node, + "For phrases and wildcard queries the float attribute " + fuzzy + + " is automatically converted to: " + fuzzy.intValue())); + } + } + return new SlopQueryNode(child, fuzzy.intValue()); + } else if (child instanceof FieldQueryNode) { + + FieldQueryNode fn = (FieldQueryNode) child; + + if (config + .has(AqpStandardQueryConfigHandler.ConfigurationKeys.ALLOW_SLOW_FUZZY) != false + && config + .get(AqpStandardQueryConfigHandler.ConfigurationKeys.ALLOW_SLOW_FUZZY) == true) { + if (fuzzy > 0.0f && fuzzy <= 1.0f) { + return new SlowFuzzyQueryNode(fn.getFieldAsString(), + fn.getTextAsString(), fuzzy, fn.getBegin(), fn.getEnd()); + } + } + + return new FuzzyQueryNode(fn.getFieldAsString(), fn.getTextAsString(), + fuzzy, fn.getBegin(), fn.getEnd()); + + } else { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.INVALID_SYNTAX, node.toString() + + "\nUse of ~ is not allowed here")); + } + + } + return node; + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQProcessorPost.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQProcessorPost.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQProcessorPost.java (revision 0) @@ -0,0 +1,42 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * The class that is used for ANTLR query nodes, it is the same as + * {@link AqpQPHRASEProcessor}, the only difference is that it works in + * postProcessor phase (instead of pre-process phase) + * + * + */ + +public class AqpQProcessorPost extends AqpQProcessor { + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof AqpANTLRNode) { + AqpANTLRNode n = (AqpANTLRNode) node; + if (nodeIsWanted(n)) { + return createQNode(n); + } + } + return node; + } + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + public boolean nodeIsWanted(AqpANTLRNode node) { + throw new UnsupportedOperationException(); + } + + @Override + public QueryNode createQNode(AqpANTLRNode node) throws 
QueryNodeException { + throw new UnsupportedOperationException(); + } +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQNORMALProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQNORMALProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQNORMALProcessor.java (revision 0) @@ -0,0 +1,46 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * Converts QNORMAL node into @{link {@link FieldQueryNode}. The field value is + * the @{link DefaultFieldAttribute} specified in the configuration. + * + *
+ * + * If the user specified a field, it will be set by the @{link + * AqpFIELDProcessor} Therefore the {@link AqpQNORMALProcessor} should run + * before it. + * + * + * @see QueryConfigHandler + * @see DefaultFieldAttribute + * + */ +public class AqpQNORMALProcessor extends AqpQProcessor { + + public boolean nodeIsWanted(AqpANTLRNode node) { + if (node.getTokenLabel().equals("QNORMAL")) { + return true; + } + return false; + } + + public QueryNode createQNode(AqpANTLRNode node) throws QueryNodeException { + String field = getDefaultFieldName(); + + AqpANTLRNode subChild = (AqpANTLRNode) node.getChildren().get(0); + + return new FieldQueryNode(field, + EscapeQuerySyntaxImpl.discardEscapeChar(subChild.getTokenInput()), + subChild.getTokenStart(), subChild.getTokenEnd()); + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQTRUNCATEDProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQTRUNCATEDProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQTRUNCATEDProcessor.java (revision 0) @@ -0,0 +1,55 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import org.apache.lucene.queryparser.flexible.messages.MessageImpl; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.WildcardQueryNode; +import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * Converts QTRUNCATED node into @{link {@link WildcardQueryNode}. The field + * value used is the @{link DefaultFieldAttribute} specified in the + * configuration. + * + *
+ * + * If the user specified a field, it will be set by the @{link + * AqpFIELDProcessor} Therefore the {@link AqpQTRUNCATEDProcessor} should run + * before it. + * + * + * @see QueryConfigHandler + * @see DefaultFieldAttribute + * + */ +public class AqpQTRUNCATEDProcessor extends AqpQProcessor { + + public boolean nodeIsWanted(AqpANTLRNode node) { + if (node.getTokenLabel().equals("QTRUNCATED")) { + return true; + } + return false; + } + + public QueryNode createQNode(AqpANTLRNode node) throws QueryNodeException { + String field = getDefaultFieldName(); + + AqpANTLRNode subChild = (AqpANTLRNode) node.getChildren().get(0); + String input = subChild.getTokenInput(); + + if (input.contains("*?") || input.contains("?*")) { + throw new QueryNodeException(new MessageImpl( + QueryParserMessages.INVALID_SYNTAX, + "It is not allowed to put * next to ?" + input)); + } + + return new WildcardQueryNode(field, + EscapeQuerySyntaxImpl.discardEscapeChar(input), + subChild.getTokenStart(), subChild.getTokenEnd()); + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQRANGEEXProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQRANGEEXProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQRANGEEXProcessor.java (revision 0) @@ -0,0 +1,49 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * Converts QRANGEEX node into @{link {@link ParametricQueryNode}. The field + * value is the @{link DefaultFieldAttribute} specified in the configuration. + * + * Because QRANGE nodes have this shape: + * + *
+ *                      QRANGE
+ *                      /    \
+ *                 QNORMAL  QPHRASE
+ *                   /          \
+ *                 some       "phrase"
+ * 
+ * + * It is important to queue {@link AqpQRANGEEXProcessor} and + * {@link AqpQRANGEINProcessor} before the processors that + * transform QNORMAL, QPHRASE and the other Q* nodes.
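+ *
+ * The only difference from {@link AqpQRANGEINProcessor} is that both bounds are
+ * exclusive; assuming a curly-brace syntax such as year:{2000 TO 2005}, the
+ * resulting node is the equivalent of new TermRangeQueryNode(lower, upper,
+ * false, false).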
+ * + * If the user specified a field, it will be set by the @{link + * AqpFIELDProcessor} Therefore this processor should queue before @{link + * AqpFIELDProcessor}. + * + * + * @see QueryConfigHandler + * @see DefaultFieldAttribute + * @see AqpQRANGEINProcessor + * @see AqpQueryNodeProcessorPipeline + * + */ +public class AqpQRANGEEXProcessor extends AqpQRANGEINProcessor { + + public AqpQRANGEEXProcessor() { + lowerInclusive = false; + upperInclusive = false; + } + + public boolean nodeIsWanted(AqpANTLRNode node) { + if (node.getTokenLabel().equals("QRANGEEX")) { + return true; + } + return false; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpCLAUSEProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpCLAUSEProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpCLAUSEProcessor.java (revision 0) @@ -0,0 +1,43 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.GroupQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * Converts CLAUSE node into @{link {@link GroupQueryNode} + * + * + */ +public class AqpCLAUSEProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + + if (node instanceof AqpANTLRNode + && ((AqpANTLRNode) node).getTokenLabel().equals("CLAUSE")) { + + return new GroupQueryNode(node.getChildren().get(0)); + // return new BooleanQueryNode(node.getChildren()); + } + return node; + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpNUCLEUSProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpNUCLEUSProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpNUCLEUSProcessor.java (revision 0) @@ -0,0 +1,56 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpUtils; + +public class AqpNUCLEUSProcessor extends 
QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof AqpANTLRNode + && ((AqpANTLRNode) node).getTokenLabel().equals("NUCLEUS")) { + List children = node.getChildren(); + AqpANTLRNode fieldNode = (AqpANTLRNode) children.remove(0); + String field = getFieldValue(fieldNode); + QueryNode valueNode = children.get(0); + if (field != null) { + AqpUtils.applyFieldToAllChildren(EscapeQuerySyntaxImpl.discardEscapeChar(field) + .toString(), valueNode); + } + return valueNode; + } + return node; + } + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + + private String getFieldValue(AqpANTLRNode fieldNode) + throws QueryNodeException { + + if (fieldNode != null && fieldNode.getChildren() != null) { + return ((AqpANTLRNode) fieldNode.getChildren().get(0)).getTokenInput(); + } + return null; + + } + + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQPHRASETRUNCProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQPHRASETRUNCProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpQPHRASETRUNCProcessor.java (revision 0) @@ -0,0 +1,47 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.WildcardQueryNode; +import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * Converts QPHRASETRUNC node into @{link {@link WildcardQueryNode}. The field + * value used is the @{link DefaultFieldAttribute} specified in the + * configuration. + * + *
+ * + * If the user specified a field, it will be set by the @{link + * AqpFIELDProcessor} Therefore the {@link AqpQTRUNCATEDProcessor} should run + * before it. + * + * + * @see QueryConfigHandler + * @see DefaultFieldAttribute + * + */ +public class AqpQPHRASETRUNCProcessor extends AqpQProcessor { + + public boolean nodeIsWanted(AqpANTLRNode node) { + if (node.getTokenLabel().equals("QPHRASETRUNC")) { + return true; + } + return false; + } + + public QueryNode createQNode(AqpANTLRNode node) throws QueryNodeException { + String field = getDefaultFieldName(); + + AqpANTLRNode subChild = (AqpANTLRNode) node.getChildren().get(0); + + return new WildcardQueryNode(field, + EscapeQuerySyntaxImpl.discardEscapeChar(subChild.getTokenInput() + .substring(1, subChild.getTokenInput().length() - 1)), + subChild.getTokenStart() + 1, subChild.getTokenEnd() - 1); + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpAnalyzerQueryNodeProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpAnalyzerQueryNodeProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpAnalyzerQueryNodeProcessor.java (revision 0) @@ -0,0 +1,402 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import java.io.IOException; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CachingTokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.FuzzyQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.GroupQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.NoTokenFoundQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QuotedFieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.RangeQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.TextableQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.TokenizedPhraseQueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys; +import org.apache.lucene.queryparser.flexible.standard.nodes.MultiPhraseQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.RegexpQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.StandardBooleanQueryNode; +import org.apache.lucene.queryparser.flexible.standard.nodes.WildcardQueryNode; +import org.apache.lucene.queryparser.flexible.standard.processors.AnalyzerQueryNodeProcessor; + +/** + * This is an improved version of the {@link AnalyzerQueryNodeProcessor} it is + * better because it keeps track of the position offset which is absolutely + * indispensable for proper parsing of expanded queries. And also we save the + * type attribute name with the node + * + * TODO: send a patch and make them accept it + * + * This processor verifies if {@link ConfigurationKeys#ANALYZER} is defined in + * the {@link QueryConfigHandler}. If it is and the analyzer is not + * null, it looks for every {@link FieldQueryNode} that is not + * {@link WildcardQueryNode}, {@link FuzzyQueryNode} or {@link RangeQueryNode} + * contained in the query node tree, then it applies the analyzer to that + * {@link FieldQueryNode} object.
+ *
+ * If the analyzer returns only one term, that term is set on the + * {@link FieldQueryNode} and the node is returned.
+ *
+ * If the analyzer returns more than one term, a {@link TokenizedPhraseQueryNode} + * or a {@link MultiPhraseQueryNode} is created, depending on whether there are + * several terms at the same position, and that node is returned.
+ *
+ * If no term is returned by the analyzer, a {@link NoTokenFoundQueryNode} object + * is returned.
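+ *
+ * A minimal configuration sketch (the analyzer and the parser variable are only
+ * placeholders for illustration):
+ *
+ *   QueryConfigHandler config = parser.getQueryConfigHandler();
+ *   config.set(ConfigurationKeys.ANALYZER, new WhitespaceAnalyzer(matchVersion));
+ *   // one emitted token keeps the FieldQueryNode; several tokens at the same
+ *   // position produce a MultiPhraseQueryNode for quoted input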
+ * + * @see ConfigurationKeys#ANALYZER + * @see Analyzer + * @see TokenStream + */ + +public class AqpAnalyzerQueryNodeProcessor extends QueryNodeProcessorImpl { + + public String TYPE_ATTRIBUTE = "token_type_attribute"; + private Analyzer analyzer; + + private boolean positionIncrementsEnabled; + + public AqpAnalyzerQueryNodeProcessor() { + // empty constructor + } + + @Override + public QueryNode process(QueryNode queryTree) throws QueryNodeException { + Analyzer analyzer = getQueryConfigHandler().get(ConfigurationKeys.ANALYZER); + + if (analyzer != null) { + this.analyzer = analyzer; + this.positionIncrementsEnabled = false; + Boolean positionIncrementsEnabled = getQueryConfigHandler().get( + ConfigurationKeys.ENABLE_POSITION_INCREMENTS); + + if (positionIncrementsEnabled != null) { + this.positionIncrementsEnabled = positionIncrementsEnabled; + } + + if (this.analyzer != null) { + return super.process(queryTree); + } + + } + + return queryTree; + + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + + if (node instanceof TextableQueryNode + && !(node instanceof WildcardQueryNode) + && !(node instanceof FuzzyQueryNode) + && !(node instanceof RegexpQueryNode) + && !(node.getParent() instanceof RangeQueryNode)) { + + FieldQueryNode fieldNode = ((FieldQueryNode) node); + int queryStart = Math.max(fieldNode.getBegin(), 0); // could be -1 + String text = fieldNode.getTextAsString(); + String field = fieldNode.getFieldAsString(); + + TokenStream source; + try { + source = this.analyzer.tokenStream(field, new StringReader(text)); + source.reset(); + } catch (IOException e1) { + throw new RuntimeException(e1); + } + CachingTokenFilter buffer = new CachingTokenFilter(source); + + PositionIncrementAttribute posIncrAtt = null; + int numTokens = 0; + int positionCount = 0; + boolean severalTokensAtSamePosition = false; + + if (buffer.hasAttribute(PositionIncrementAttribute.class)) { + posIncrAtt = buffer.getAttribute(PositionIncrementAttribute.class); + } + + TypeAttribute typeAtt = null; + if (buffer.hasAttribute(TypeAttribute.class)) { + typeAtt = buffer.getAttribute(TypeAttribute.class); + } + + try { + + while (buffer.incrementToken()) { + numTokens++; + int positionIncrement = (posIncrAtt != null) ? 
posIncrAtt + .getPositionIncrement() : 1; + if (positionIncrement != 0) { + positionCount += positionIncrement; + + } else { + severalTokensAtSamePosition = true; + } + + } + + } catch (IOException e) { + // ignore + } + + try { + // rewind the buffer stream + buffer.reset(); + + // close original stream - all tokens buffered + source.close(); + } catch (IOException e) { + // ignore + } + + if (!buffer.hasAttribute(CharTermAttribute.class)) { + return new NoTokenFoundQueryNode(); + } + + CharTermAttribute termAtt = buffer.getAttribute(CharTermAttribute.class); + + int offsetStart = -1; + int offsetEnd = -1; + OffsetAttribute offsetAtt; + if (buffer.hasAttribute(OffsetAttribute.class)) { + offsetAtt = buffer.getAttribute(OffsetAttribute.class); + } else { + offsetAtt = null; + } + + if (numTokens == 0) { + return new NoTokenFoundQueryNode(); + + } else if (numTokens == 1) { + String term = null; + try { + boolean hasNext; + hasNext = buffer.incrementToken(); + assert hasNext == true; + term = termAtt.toString(); + + } catch (IOException e) { + // safe to ignore, because we know the number of tokens + } + + fieldNode.setText(term); + if (offsetAtt != null) { + fieldNode.setBegin(queryStart + offsetAtt.startOffset()); + fieldNode.setEnd(queryStart + offsetAtt.endOffset()); + } + if (typeAtt != null) + fieldNode.setTag(TYPE_ATTRIBUTE, typeAtt.type()); + return fieldNode; + + } else if (severalTokensAtSamePosition + || !(node instanceof QuotedFieldQueryNode)) { + if (positionCount == 1 || !(node instanceof QuotedFieldQueryNode)) { + // no phrase query: + LinkedList children = new LinkedList(); + + for (int i = 0; i < numTokens; i++) { + String term = null; + offsetStart = offsetEnd = -1; + try { + boolean hasNext = buffer.incrementToken(); + assert hasNext == true; + term = termAtt.toString(); + if (offsetAtt != null) { + offsetStart = queryStart + offsetAtt.startOffset(); + offsetEnd = queryStart + offsetAtt.endOffset(); + } + + } catch (IOException e) { + // safe to ignore, because we know the number of tokens + } + + FieldQueryNode fq = new FieldQueryNode(field, term, offsetStart, + offsetEnd); + if (typeAtt != null) + fq.setTag(TYPE_ATTRIBUTE, typeAtt.type()); + children.add(fq); + + } + return new GroupQueryNode(new StandardBooleanQueryNode(children, + positionCount == 1)); + } else { + // phrase query: + MultiPhraseQueryNode mpq = new MultiPhraseQueryNode(); + + List multiTerms = new ArrayList(); + int position = -1; + int i = 0; + int termGroupCount = 0; + + for (; i < numTokens; i++) { + String term = null; + offsetStart = offsetEnd = -1; + int positionIncrement = 1; + String tokenType = null; + try { + boolean hasNext = buffer.incrementToken(); + assert hasNext == true; + term = termAtt.toString(); + if (posIncrAtt != null) { + positionIncrement = posIncrAtt.getPositionIncrement(); + } + if (offsetAtt != null) { + offsetStart = queryStart + offsetAtt.startOffset(); + offsetEnd = queryStart + offsetAtt.endOffset(); + } + if (typeAtt != null) + tokenType = typeAtt.type(); + } catch (IOException e) { + // safe to ignore, because we know the number of tokens + } + + if (positionIncrement > 0 && multiTerms.size() > 0) { + + for (FieldQueryNode termNode : multiTerms) { + + if (this.positionIncrementsEnabled) { + termNode.setPositionIncrement(position); + } else { + termNode.setPositionIncrement(termGroupCount); + } + + mpq.add(termNode); + + } + + // Only increment once for each "group" of + // terms that were in the same position: + termGroupCount++; + + multiTerms.clear(); + + } + + 
position += positionIncrement; + FieldQueryNode fq = new FieldQueryNode(field, term, offsetStart, + offsetEnd); + fq.setTag(TYPE_ATTRIBUTE, tokenType); + multiTerms.add(fq); + + } + + for (FieldQueryNode termNode : multiTerms) { + + if (this.positionIncrementsEnabled) { + termNode.setPositionIncrement(position); + + } else { + termNode.setPositionIncrement(termGroupCount); + } + + mpq.add(termNode); + + } + + return mpq; + + } + + } else { + + TokenizedPhraseQueryNode pq = new TokenizedPhraseQueryNode(); + + int position = -1; + + for (int i = 0; i < numTokens; i++) { + String term = null; + int positionIncrement = 1; + offsetStart = offsetEnd = -1; + + try { + boolean hasNext = buffer.incrementToken(); + assert hasNext == true; + term = termAtt.toString(); + + if (posIncrAtt != null) { + positionIncrement = posIncrAtt.getPositionIncrement(); + } + + if (offsetAtt != null) { + offsetStart = queryStart + offsetAtt.startOffset(); + offsetEnd = queryStart + offsetAtt.endOffset(); + } + + } catch (IOException e) { + // safe to ignore, because we know the number of tokens + } + + FieldQueryNode newFieldNode = new FieldQueryNode(field, term, + offsetStart, offsetEnd); + if (typeAtt != null) + newFieldNode.setTag(TYPE_ATTRIBUTE, typeAtt.type()); + + if (this.positionIncrementsEnabled) { + position += positionIncrement; + newFieldNode.setPositionIncrement(position); + + } else { + newFieldNode.setPositionIncrement(i); + } + + pq.add(newFieldNode); + + } + + return pq; + + } + + } + + return node; + + } + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + + return node; + + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + + return children; + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpMULTITERMProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpMULTITERMProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpMULTITERMProcessor.java (revision 0) @@ -0,0 +1,82 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.BoostQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.OrQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.core.util.UnescapedCharSequence; +import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpUtils; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpUtils.Modifier; + +public class AqpMULTITERMProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof 
AqpANTLRNode + && ((AqpANTLRNode) node).getTokenLabel().equals("MULTITERM")) { + AqpANTLRNode mNode = (AqpANTLRNode) node; + AqpANTLRNode modifierNode = (AqpANTLRNode) mNode.getChild("MODIFIER"); + AqpANTLRNode fieldNode = (AqpANTLRNode) mNode.getChild("FIELD"); + AqpANTLRNode multiNode = (AqpANTLRNode) mNode.getChild("MULTIATOM"); + AqpANTLRNode tModifierNode = (AqpANTLRNode) mNode.getChild("TMODIFIER"); + + String field = AqpUtils.getFirstChildInputString(fieldNode); + Modifier modifier = AqpUtils.getFirstChildInputModifier(modifierNode); + + if (field != null) { + AqpUtils.applyFieldToAllChildren(EscapeQuerySyntaxImpl + .discardEscapeChar(field).toString(), multiNode); + } + + node = new OrQueryNode(multiNode.getChildren()); + + if (tModifierNode != null) { + AqpANTLRNode boostNode = tModifierNode.getChild("BOOST"); + Float boost = AqpUtils.getFirstChildInputFloat(boostNode); + if (boost != null) { + node = new BoostQueryNode(node, boost); + } + } + + if (modifier != null) { + node = new ModifierQueryNode(node, + modifier == Modifier.PLUS ? ModifierQueryNode.Modifier.MOD_REQ + : ModifierQueryNode.Modifier.MOD_NOT); + } + + return node; + + } + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + + private String getFieldValue(AqpANTLRNode fieldNode) + throws QueryNodeException { + + if (fieldNode != null && fieldNode.getChildren() != null) { + return ((AqpANTLRNode) fieldNode.getChildren().get(0)).getTokenInput(); + } + return null; + + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpTMODIFIERProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpTMODIFIERProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpTMODIFIERProcessor.java (revision 0) @@ -0,0 +1,98 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; + +/** + * TMODIFIER node contains FUZZY, BOOST, FIELD nodes. This processor changes the + * tree from this shape: + * + *
+ *               TMODIFIER
+ *               /   |   \
+ *           BOOST FUZZY FIELD
+ *             /           \
+ *            ^1           ...
+ * 
+ * + * To this shape: + * + *
+ *               BOOST
+ *                / \
+ *              ^1  FUZZY
+ *                    |
+ *                    FIELD
+ *                      \
+ *                     ...
+ * 
+ * + * After the processor has run, the TMODIFIER node is removed and the topmost + * remaining node (BOOST in the example above) is returned.
+ * + * If TMODIFIER contains only a single child, that child is returned (thus removing + * the TMODIFIER node from the tree).
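+ *
+ * Written as flattened trees (purely illustrative), the rewrite is roughly
+ *
+ *   (TMODIFIER (BOOST ^1) (FUZZY ~0.8) (FIELD f ...))
+ *     ->  (BOOST ^1 (FUZZY ~0.8 (FIELD f ...)))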
+ * + * @see AqpFUZZYProcessor + * @see AqpBOOSTProcessor + * @see AqpFIELDProcessor + */ +public class AqpTMODIFIERProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + + if (node instanceof AqpANTLRNode + && ((AqpANTLRNode) node).getTokenLabel().equals("TMODIFIER")) { + + List children = node.getChildren(); + + if (children.size() == 1) { + return children.get(0); + } + + QueryNode masterChild = null; + QueryNode currentChild; + List currentChildren; + + for (int i = 0; i < children.size(); i++) { + currentChild = children.get(i); + if (currentChild.isLeaf()) { + continue; + } + if (masterChild == null) { + masterChild = currentChild; + node = masterChild; + continue; + } + currentChildren = masterChild.getChildren(); + currentChildren.add(currentChild); + // masterChild.set(currentChildren); + masterChild = children.get(i); + } + + return node; + + } + return node; + } + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFIELDProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFIELDProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpFIELDProcessor.java (revision 0) @@ -0,0 +1,65 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldableNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuerySyntaxImpl; +import org.apache.lucene.queryparser.flexible.aqp.nodes.AqpANTLRNode; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpUtils; + +/** + * This processor applies the user-submitted value to all {@link FieldableNode} + * nodes which are below it. The FIELD node itself will be discarded.
+ * If the FIELD has only one child, the child will be returned + * + * @see FieldableNode + * @see AqpQNORMALProcessor and similar + * + */ +public class AqpFIELDProcessor extends QueryNodeProcessorImpl implements + QueryNodeProcessor { + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof AqpANTLRNode + && ((AqpANTLRNode) node).getTokenLabel().equals("FIELD")) { + if (node.getChildren().size() == 1) { + return node.getChildren().get(0); + } + + String field = getFieldValue(node); + node = node.getChildren().get(node.getChildren().size() - 1); + if (field != null) { + AqpUtils.applyFieldToAllChildren(EscapeQuerySyntaxImpl.discardEscapeChar(field) + .toString(), node); + } + } + return node; + } + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + + private String getFieldValue(QueryNode fieldNode) throws QueryNodeException { + + if (fieldNode != null && fieldNode.getChildren() != null) { + return ((AqpANTLRNode) fieldNode.getChildren().get(0)).getTokenInput(); + } + return null; + } + + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpNullDefaultFieldProcessor.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpNullDefaultFieldProcessor.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/processors/AqpNullDefaultFieldProcessor.java (revision 0) @@ -0,0 +1,66 @@ +package org.apache.lucene.queryparser.flexible.aqp.processors; + +import java.util.List; + +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode; +import org.apache.lucene.queryparser.flexible.core.nodes.FieldableNode; +import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl; +import org.apache.lucene.queryparser.flexible.standard.processors.AnalyzerQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.MatchAllDocsQueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.standard.processors.MultiFieldQueryNodeProcessor; + +/** + * This processor changes field value 'null' into ''. + * + * The processor solves the following problem (probably affecting SOLR only): + * + * {@link MultiFieldQueryNodeProcessor} looks at {@link FieldableNode} and + * eventually creates several instances of them (eg. "query" becomes field:query + * fieldb:query). But this works only when the initial field==null. If it was + * '', nothing happens. If the current configuration contains + * {@link MultiFieldQueryNodeProcessor} but it is empty, also the field will be + * null (because {@link DefaultFieldAttribute} is by default null) + * + * {@link AnalyzerQueryNodeProcessor}, on the other hand, expects that field is + * a string value - to be precise, it asks the current analyzer to check the + * field. 
And if the field was null, then with SOLR this throws + * {@link NullPointerException} + * + * Please put {@link AqpNullDefaultFieldProcessor} also before + * {@link MatchAllDocsQueryNodeProcessor} otherwise you will get + * {@link NullPointerException} if the default field is null + * + * @see FieldableNode + * @see MultiFieldQueryNodeProcessor + * @see AnalyzerQueryNodeProcessor + * @see AqpInvenioQueryParser + * @see DefaultFieldAttribute + * + */ +public class AqpNullDefaultFieldProcessor extends QueryNodeProcessorImpl + implements QueryNodeProcessor { + + @Override + protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException { + if (node instanceof FieldQueryNode + && ((FieldQueryNode) node).getField() == null) { + ((FieldQueryNode) node).setField(""); + } + return node; + } + + @Override + protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException { + return node; + } + + @Override + protected List setChildrenOrder(List children) + throws QueryNodeException { + return children; + } + +} Index: lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpQueryParser.java =================================================================== --- lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpQueryParser.java (revision 0) +++ lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/aqp/AqpQueryParser.java (revision 0) @@ -0,0 +1,680 @@ +package org.apache.lucene.queryparser.flexible.aqp; + +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.List; +import java.util.ListIterator; +import java.util.Locale; +import java.util.Map; +import java.util.TooManyListenersException; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.document.DateTools; +import org.apache.lucene.document.DateTools.Resolution; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.QueryParserHelper; +import org.apache.lucene.queryparser.flexible.core.builders.QueryBuilder; +import org.apache.lucene.queryparser.flexible.core.builders.QueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessor; +import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorPipeline; +import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser; +import org.apache.lucene.queryparser.flexible.standard.builders.StandardQueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.standard.config.FuzzyConfig; +import org.apache.lucene.queryparser.flexible.standard.config.NumericConfig; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.Operator; +import org.apache.lucene.queryparser.flexible.standard.parser.StandardSyntaxParser; +import org.apache.lucene.queryparser.flexible.standard.processors.StandardQueryNodeProcessorPipeline; +import org.apache.lucene.queryparser.flexible.aqp.AqpSyntaxParser; +import org.apache.lucene.queryparser.flexible.aqp.builders.AqpQueryTreeBuilder; +import org.apache.lucene.queryparser.flexible.aqp.config.AqpFeedback; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.aqp.util.AqpDebuggingQueryNodeProcessorPipeline; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; + +/** + *

+ * This class is a helper that makes it easy to use a Lucene query parser
+ * whose grammar is written in EBNF and compiled with ANTLR.
+ *

+ *

+ * To construct a Query object from a query string, use the + * {@link #parse(String, String)} method: + *

+ * AqpQueryParser queryParserHelper = new AqpQueryParser(config, syntaxParser,
+ *     processorPipeline, treeBuilder);
    + * Query query = queryParserHelper.parse("a AND b", "defaultField"); + *
+ *
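+ *
+ * The returned {@link Query} is an ordinary Lucene query and can be executed
+ * as usual. A small sketch (the IndexSearcher instance is assumed to exist
+ * already; it is not provided by this class):
+ *
+ * TopDocs hits = indexSearcher.search(query, 10);
+ *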

+ * To change any configuration before parsing the query string do, for example: + *

+ *

    + * // the query config handler returned by {@link StandardQueryParser} is a + * {@link StandardQueryConfigHandler}
    + * queryParserHelper.getQueryConfigHandler().setAnalyzer(new + * WhitespaceAnalyzer()); + *
+ *
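+ *
+ * The AQP-specific options exposed by this class have dedicated setters as
+ * well. A short sketch (the values shown are purely illustrative):
+ *
+ * queryParserHelper.setDefaultOperator(StandardQueryConfigHandler.Operator.AND);
+ * queryParserHelper.setDefaultProximity(5);
+ * queryParserHelper.setImplicitBoost(1.0f);
+ * queryParserHelper.setAllowSlowFuzzy(false);
+ *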

+ * The syntax for query strings is as follows (copied from the old QueryParser + * javadoc): + *

    + * A Query is a series of clauses. A clause may be prefixed by: + *
      + *
    • a plus (+) or a minus (-) sign, indicating that + * the clause is required or prohibited respectively; or + *
    • a term followed by a colon, indicating the field to be searched. This + * enables one to construct queries which search multiple fields. + *
    + * + * A clause may be either: + *
      + *
    • a term, indicating all the documents that contain this term; or + *
    • a nested query, enclosed in parentheses. Note that this may be used with + * a +/- prefix to require any of a set of terms. + *
    + * + * Thus, in BNF, the query grammar is: + * + *
    + *   Query  ::= ( Clause )*
    + *   Clause ::= ["+", "-"] [<TERM> ":"] ( <TERM> | "(" Query ")" )
    + * 
    + * + *

    + * Examples of appropriately formatted queries can be found in the query syntax + * documentation. + *
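+ *
+ * For instance, the following queries are all accepted by this parser (the
+ * examples are taken from the gunit test data that accompanies the grammar):
+ *
+ * title:"X x" AND text:go
+ * field:(one +two -three)
+ * "jakarta apache"~10^0.6
+ * title:[20020101 TO 20030101]
+ *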

    + *
+ *

+ * The text parser used by this helper is an {@link AqpSyntaxParser}.
+ *

+ *

+ * The query node processor used by this helper is the
+ * {@link QueryNodeProcessorPipeline} passed to the constructor.
+ *

+ *

+ * The builder used by this helper is the {@link QueryTreeBuilder} passed to
+ * the constructor, typically an {@link AqpQueryTreeBuilder}.
+ *

+ * + * @see StandardQueryParser + * @see StandardQueryConfigHandler + * @see StandardSyntaxParser + * @see StandardQueryNodeProcessorPipeline + * @see StandardQueryTreeBuilder + * + * + * TODO: add the constructor to the SQP and remove the duplicated code + * + * public StandardQueryParser(QueryConfigHandler config, SyntaxParser + * parser, QueryNodeProcessor processor, QueryBuilder builder) { + * super(config, parser,processor, builder); } + */ +public class AqpQueryParser extends QueryParserHelper { + + private boolean debugMode = false; + private String syntaxName = null; + + public AqpQueryParser(QueryConfigHandler config, AqpSyntaxParser parser, + QueryNodeProcessorPipeline processor, QueryTreeBuilder builder) { + + super(config, parser, processor, builder); + syntaxName = parser.getClass().getName(); + } + + @Override + public String toString() { + return ""; + } + + /** + * De/activates the debugging output of the query parser + * + * It works by wrapping the processor pipeline into a debugging + * class and by calling setDebug on the underlying builder. + * @throws SecurityException + * @throws NoSuchMethodException + * @throws InvocationTargetException + * @throws IllegalArgumentException + * + * @see AqpDebuggingQueryNodeProcessorPipeline + * @see AqpQueryTreeBuilder + */ + @SuppressWarnings("unchecked") + public void setDebug(boolean debug) throws InstantiationException, + IllegalAccessException, NoSuchMethodException, SecurityException, IllegalArgumentException, InvocationTargetException { + + if (debugMode != debug) { + + QueryNodeProcessorPipeline processor = (QueryNodeProcessorPipeline) this.getQueryNodeProcessor(); + QueryBuilder builder = this.getQueryBuilder(); + + QueryNodeProcessorPipeline newPipeline; + + QueryConfigHandler configHandler = this.getQueryConfigHandler(); + + if (debug) { + newPipeline = new AqpDebuggingQueryNodeProcessorPipeline( + this.getQueryConfigHandler(), processor.getClass()); + } + else { + // can't use the simple form because parser pipelines may be using config to adjust themselves + // newPipeline = clazz.newInstance(); + + Class clazz = ((AqpDebuggingQueryNodeProcessorPipeline) processor) + .getOriginalProcessorClass(); + Constructor constructor = clazz.getConstructor(QueryConfigHandler.class); + newPipeline = constructor.newInstance(new Object[]{configHandler}); + } + + + List listOfProcessors = (List) processor; + ListIterator it = listOfProcessors.listIterator(); + while (it.hasNext()) { + newPipeline.add(it.next()); + } + this.setQueryNodeProcessor(newPipeline); + + + QueryBuilder newBuilder = builder.getClass().newInstance(); + if (newBuilder instanceof AqpQueryTreeBuilder) { + ((AqpQueryTreeBuilder) newBuilder).setDebug(debug); + this.setQueryBuilder(newBuilder); + } + + } + debugMode = debug; + } + + public boolean getDebug() { + return debugMode; + } + + /** + * Overrides {@link QueryParserHelper#parse(String, String)} so it casts the + * return object to {@link Query}. For more reference about this method, check + * {@link QueryParserHelper#parse(String, String)}. 
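+   *
+   * A minimal usage sketch (illustrative only; it assumes a fully configured
+   * parser instance, whose construction is omitted):
+   *
+   *   AqpQueryParser parser = ...;
+   *   try {
+   *     Query q = parser.parse("title:(+lucene +antlr)", "text");
+   *   } catch (QueryNodeException e) {
+   *     // thrown when the query cannot be parsed or built
+   *   }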
+ * + * @param query + * the query string + * @param defaultField + * the default field used by the text parser + * + * @return the object built from the query + * + * @throws QueryNodeException + * if something wrong happens along the three phases + */ + @Override + public Query parse(String query, String defaultField) + throws QueryNodeException { + + if (defaultField != null) { + setDefaultField(defaultField); + } + try { + return (Query) super.parse(query, defaultField); + } catch (NestedParseException e) { + throw new QueryNodeException(e); + } + + } + + public String getDefaultField() { + return getQueryConfigHandler().get( + AqpStandardQueryConfigHandler.ConfigurationKeys.DEFAULT_FIELD); + } + + public void setDefaultField(String field) { + getQueryConfigHandler().set( + AqpStandardQueryConfigHandler.ConfigurationKeys.DEFAULT_FIELD, field); + } + + public Integer getDefaultProximity() { + return getQueryConfigHandler().get( + AqpStandardQueryConfigHandler.ConfigurationKeys.DEFAULT_PROXIMITY); + } + + public void setDefaultProximity(Integer value) { + getQueryConfigHandler().set( + AqpStandardQueryConfigHandler.ConfigurationKeys.DEFAULT_PROXIMITY, + value); + } + + public Float getImplicitBoost() { + return getQueryConfigHandler().get( + AqpStandardQueryConfigHandler.ConfigurationKeys.IMPLICIT_BOOST); + } + + public void setImplicitBoost(Float value) { + getQueryConfigHandler().set( + AqpStandardQueryConfigHandler.ConfigurationKeys.IMPLICIT_BOOST, value); + } + + public AqpFeedback getFeedback() { + return getQueryConfigHandler().get( + AqpStandardQueryConfigHandler.ConfigurationKeys.FEEDBACK); + } + + public void setFeedback(AqpFeedback feedbackInstance) { + getQueryConfigHandler().set( + AqpStandardQueryConfigHandler.ConfigurationKeys.FEEDBACK, + feedbackInstance); + } + + public Float getImplicitFuzzy() { + return getQueryConfigHandler().get( + AqpStandardQueryConfigHandler.ConfigurationKeys.IMPLICIT_FUZZY); + } + + public void setImplicitFuzzy(Float value) { + getQueryConfigHandler().set( + AqpStandardQueryConfigHandler.ConfigurationKeys.IMPLICIT_FUZZY, value); + } + + public Boolean getAllowSlowFuzzy() { + return getQueryConfigHandler().get( + AqpStandardQueryConfigHandler.ConfigurationKeys.ALLOW_SLOW_FUZZY); + } + + public void setAllowSlowFuzzy(Boolean value) { + getQueryConfigHandler() + .set(AqpStandardQueryConfigHandler.ConfigurationKeys.ALLOW_SLOW_FUZZY, + value); + } + + /******************************************************************** + * Everything below is simpy copy of the StandardQueryParser * + *******************************************************************/ + + /** + * Gets implicit operator setting, which will be either {@link Operator#AND} + * or {@link Operator#OR}. + */ + public StandardQueryConfigHandler.Operator getDefaultOperator() { + return getQueryConfigHandler().get(ConfigurationKeys.DEFAULT_OPERATOR); + + } + + /** + * Sets the boolean operator of the QueryParser. In default mode ( + * {@link Operator#OR}) terms without any modifiers are considered optional: + * for example capital of Hungary is equal to + * capital OR of OR Hungary.
+   * In {@link Operator#AND} mode terms are considered to be in conjunction: the
+   * above mentioned query is parsed as capital AND of AND Hungary
+   */
+  public void setDefaultOperator(StandardQueryConfigHandler.Operator operator) {
+    getQueryConfigHandler().set(ConfigurationKeys.DEFAULT_OPERATOR, operator);
+  }
+
+  /**
+   * Set to true to automatically lower-case the terms produced by wildcard,
+   * prefix, fuzzy and range queries.
+   *
+   * Default: true.
+   */
+  public void setLowercaseExpandedTerms(boolean lowercaseExpandedTerms) {
+    getQueryConfigHandler().set(ConfigurationKeys.LOWERCASE_EXPANDED_TERMS,
+        lowercaseExpandedTerms);
+  }
+
+  /**
+   * @see #setLowercaseExpandedTerms(boolean)
+   */
+  public boolean getLowercaseExpandedTerms() {
+    Boolean lowercaseExpandedTerms = getQueryConfigHandler().get(
+        ConfigurationKeys.LOWERCASE_EXPANDED_TERMS);
+
+    if (lowercaseExpandedTerms == null) {
+      return true;
+
+    } else {
+      return lowercaseExpandedTerms;
+    }
+
+  }
+
+  /**
+   * Set to true to allow leading wildcard characters.
+   *

+ * When set, * or ? are allowed as the first + * character of a PrefixQuery and WildcardQuery. Note that this can produce + * very slow queries on big indexes. + *

+ * Default: false. + */ + public void setAllowLeadingWildcard(boolean allowLeadingWildcard) { + getQueryConfigHandler().set(ConfigurationKeys.ALLOW_LEADING_WILDCARD, + allowLeadingWildcard); + } + + /** + * Set to true to enable position increments in result query. + *

+ * When set, result phrase and multi-phrase queries will be aware of position + * increments. Useful when e.g. a StopFilter increases the position increment + * of the token that follows an omitted token. + *

+ * Default: false. + */ + public void setEnablePositionIncrements(boolean enabled) { + getQueryConfigHandler().set(ConfigurationKeys.ENABLE_POSITION_INCREMENTS, + enabled); + } + + /** + * @see #setEnablePositionIncrements(boolean) + */ + public boolean getEnablePositionIncrements() { + Boolean enablePositionsIncrements = getQueryConfigHandler().get( + ConfigurationKeys.ENABLE_POSITION_INCREMENTS); + + if (enablePositionsIncrements == null) { + return false; + + } else { + return enablePositionsIncrements; + } + + } + + /** + * By default, it uses + * {@link MultiTermQuery#CONSTANT_SCORE_AUTO_REWRITE_DEFAULT} when creating a + * prefix, wildcard and range queries. This implementation is generally + * preferable because it a) Runs faster b) Does not have the scarcity of terms + * unduly influence score c) avoids any {@link TooManyListenersException} + * exception. However, if your application really needs to use the + * old-fashioned boolean queries expansion rewriting and the above points are + * not relevant then use this change the rewrite method. + */ + public void setMultiTermRewriteMethod(MultiTermQuery.RewriteMethod method) { + getQueryConfigHandler().set(ConfigurationKeys.MULTI_TERM_REWRITE_METHOD, + method); + } + + /** + * @see #setMultiTermRewriteMethod(org.apache.lucene.search.MultiTermQuery.RewriteMethod) + */ + public MultiTermQuery.RewriteMethod getMultiTermRewriteMethod() { + return getQueryConfigHandler().get( + ConfigurationKeys.MULTI_TERM_REWRITE_METHOD); + } + + /** + * Set the fields a query should be expanded to when the field is + * null + * + * @param fields + * the fields used to expand the query + */ + public void setMultiFields(CharSequence[] fields) { + + if (fields == null) { + fields = new CharSequence[0]; + } + + getQueryConfigHandler().set(ConfigurationKeys.MULTI_FIELDS, fields); + + } + + /** + * Returns the fields used to expand the query when the field for a certain + * query is null + * + * @param fields + * the fields used to expand the query + */ + public void getMultiFields(CharSequence[] fields) { + getQueryConfigHandler().get(ConfigurationKeys.MULTI_FIELDS); + } + + /** + * Set the prefix length for fuzzy queries. Default is 0. + * + * @param fuzzyPrefixLength + * The fuzzyPrefixLength to set. + */ + public void setFuzzyPrefixLength(int fuzzyPrefixLength) { + QueryConfigHandler config = getQueryConfigHandler(); + FuzzyConfig fuzzyConfig = config.get(ConfigurationKeys.FUZZY_CONFIG); + + if (fuzzyConfig == null) { + fuzzyConfig = new FuzzyConfig(); + config.set(ConfigurationKeys.FUZZY_CONFIG, fuzzyConfig); + } + + fuzzyConfig.setPrefixLength(fuzzyPrefixLength); + + } + + public void setNumericConfigMap(Map numericConfigMap) { + getQueryConfigHandler().set(ConfigurationKeys.NUMERIC_CONFIG_MAP, + numericConfigMap); + } + + public Map getNumericConfigMap() { + return getQueryConfigHandler().get(ConfigurationKeys.NUMERIC_CONFIG_MAP); + } + + /** + * Set locale used by date range parsing. + */ + public void setLocale(Locale locale) { + getQueryConfigHandler().set(ConfigurationKeys.LOCALE, locale); + } + + /** + * Returns current locale, allowing access by subclasses. + */ + public Locale getLocale() { + return getQueryConfigHandler().get(ConfigurationKeys.LOCALE); + } + + /** + * Sets the default slop for phrases. If zero, then exact phrase matches are + * required. Default value is zero. 
+ * + * @deprecated renamed to {@link #setPhraseSlop(int)} + */ + @Deprecated + public void setDefaultPhraseSlop(int defaultPhraseSlop) { + getQueryConfigHandler().set(ConfigurationKeys.PHRASE_SLOP, + defaultPhraseSlop); + } + + /** + * Sets the default slop for phrases. If zero, then exact phrase matches are + * required. Default value is zero. + */ + public void setPhraseSlop(int defaultPhraseSlop) { + getQueryConfigHandler().set(ConfigurationKeys.PHRASE_SLOP, + defaultPhraseSlop); + } + + public void setAnalyzer(Analyzer analyzer) { + getQueryConfigHandler().set(ConfigurationKeys.ANALYZER, analyzer); + } + + public Analyzer getAnalyzer() { + return getQueryConfigHandler().get(ConfigurationKeys.ANALYZER); + } + + /** + * @see #setAllowLeadingWildcard(boolean) + */ + public boolean getAllowLeadingWildcard() { + Boolean allowLeadingWildcard = getQueryConfigHandler().get( + ConfigurationKeys.ALLOW_LEADING_WILDCARD); + + if (allowLeadingWildcard == null) { + return false; + + } else { + return allowLeadingWildcard; + } + } + + /** + * Get the minimal similarity for fuzzy queries. + */ + public float getFuzzyMinSim() { + FuzzyConfig fuzzyConfig = getQueryConfigHandler().get( + ConfigurationKeys.FUZZY_CONFIG); + + if (fuzzyConfig == null) { + return FuzzyQuery.defaultMinSimilarity; + } else { + return fuzzyConfig.getMinSimilarity(); + } + } + + /** + * Get the prefix length for fuzzy queries. + * + * @return Returns the fuzzyPrefixLength. + */ + public int getFuzzyPrefixLength() { + FuzzyConfig fuzzyConfig = getQueryConfigHandler().get( + ConfigurationKeys.FUZZY_CONFIG); + + if (fuzzyConfig == null) { + return FuzzyQuery.defaultPrefixLength; + } else { + return fuzzyConfig.getPrefixLength(); + } + } + + /** + * Gets the default slop for phrases. + */ + public int getPhraseSlop() { + Integer phraseSlop = getQueryConfigHandler().get( + ConfigurationKeys.PHRASE_SLOP); + + if (phraseSlop == null) { + return 0; + + } else { + return phraseSlop; + } + } + + /** + * Set the minimum similarity for fuzzy queries. Default is defined on + * {@link FuzzyQuery#defaultMinSimilarity}. + */ + public void setFuzzyMinSim(float fuzzyMinSim) { + QueryConfigHandler config = getQueryConfigHandler(); + FuzzyConfig fuzzyConfig = config.get(ConfigurationKeys.FUZZY_CONFIG); + + if (fuzzyConfig == null) { + fuzzyConfig = new FuzzyConfig(); + config.set(ConfigurationKeys.FUZZY_CONFIG, fuzzyConfig); + } + + fuzzyConfig.setMinSimilarity(fuzzyMinSim); + } + + /** + * Sets the boost used for each field. + * + * @param boosts + * a collection that maps a field to its boost + */ + public void setFieldsBoost(Map boosts) { + getQueryConfigHandler().set(ConfigurationKeys.FIELD_BOOST_MAP, boosts); + } + + /** + * Returns the field to boost map used to set boost for each field. + * + * @return the field to boost map + */ + public Map getFieldsBoost() { + return getQueryConfigHandler().get(ConfigurationKeys.FIELD_BOOST_MAP); + } + + /** + * Sets the default {@link Resolution} used for certain field when no + * {@link Resolution} is defined for this field. + * + * @param dateResolution + * the default {@link Resolution} + */ + public void setDateResolution(DateTools.Resolution dateResolution) { + getQueryConfigHandler().set(ConfigurationKeys.DATE_RESOLUTION, + dateResolution); + } + + /** + * Returns the default {@link Resolution} used for certain field when no + * {@link Resolution} is defined for this field. 
+   *
+   * @return the default {@link Resolution}
+   */
+  public DateTools.Resolution getDateResolution() {
+    return getQueryConfigHandler().get(ConfigurationKeys.DATE_RESOLUTION);
+  }
+
+  /**
+   * Sets the {@link Resolution} used for each field
+   *
+   * @param dateRes
+   *          a collection that maps a field to its {@link Resolution}
+   *
+   * @deprecated this method was renamed to {@link #setDateResolutionMap(Map)}
+   */
+  @Deprecated
+  public void setDateResolution(Map dateRes) {
+    setDateResolutionMap(dateRes);
+  }
+
+  /**
+   * Returns the field to {@link Resolution} map used to normalize each date
+   * field.
+   *
+   * @return the field to {@link Resolution} map
+   */
+  public Map getDateResolutionMap() {
+    return getQueryConfigHandler().get(
+        ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP);
+  }
+
+  /**
+   * Sets the {@link Resolution} used for each field
+   *
+   * @param dateRes
+   *          a collection that maps a field to its {@link Resolution}
+   */
+  public void setDateResolutionMap(
+      Map dateRes) {
+    getQueryConfigHandler().set(ConfigurationKeys.FIELD_DATE_RESOLUTION_MAP,
+        dateRes);
+  }
+}
Index: lucene/queryparser/grammars/StandardLuceneGrammar.html
===================================================================
--- lucene/queryparser/grammars/StandardLuceneGrammar.html	(revision 0)
+++ lucene/queryparser/grammars/StandardLuceneGrammar.html	(revision 0)
@@ -0,0 +1 @@
+To generate this file, run: ant -f aqp-build.xml generate-html
Index: lucene/queryparser/grammars/StandardLuceneGrammar.g
===================================================================
--- lucene/queryparser/grammars/StandardLuceneGrammar.g	(revision 0)
+++ lucene/queryparser/grammars/StandardLuceneGrammar.g	(revision 0)
@@ -0,0 +1,475 @@
+grammar StandardLuceneGrammar;
+
+//
+// This is a re-implementation of the standard Lucene syntax with ANTLR3
+// http://lucene.apache.org/core/4_3_0/queryparser/index.html
+//
+// The query syntax is complete and supports the same features as the
+// original parser written in JavaCC. The advantage of this grammar is that it
+// is 'pluggable' into Lucene's modern flexible parser, so that you can
+// add custom logic on top of the 'rigid' query syntax. Besides...the grammar
+// is not that 'rigid' - you can modify the grammar and easily recompile.
+//
+// # run this command inside antlrqueryparser
+//
+// $ ant generate-antlr -Dgrammar=MyNewGrammar
+//
+// # or if you want to test things, do:
+//
+// $ ant try-view -Dgrammar=MyNewGrammar -Dquery="foo AND bar"
+//
+//
+// Implementation note: I have tried hard to avoid putting language-specific details
+// into the grammar; unfortunately, this was not always possible. But it is kept
+// to a minimum. You can generate parser code in your language of choice
+// if you change the following:
+//
+// options :
+//    language=
+//    superClass=  the default is to subclass 'UnforgivingParser', this java class
+//                 lives in the package oal.queryparser.flexible.aqp.parser
+//                 and its purpose is to bark every time an exception
+//                 happens (otherwise, ANTLR tries to recover from some situations
+//                 -- you may want to remove this definition, or add your own
+//                 error recovery logic there)
+//
+// @header:        this adds the java declaration to the generated parser file,
+//                 feel free to remove (if you want to test the grammar using
+//                 ANTLRWorks, you want to remove it)
+// @lexer::header: ditto, but for the lexer
+// @lexer::members: again, necessary for being strict and preventing error
+//                 recovery, but this applies only to lexer errors.
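+//
+// As a quick illustration of how the generated code is driven from the Java
+// side, the parser produced from this grammar can be used directly with the
+// ANTLR3 runtime. This is only a sketch: it assumes the usual ANTLR3 naming
+// convention for generated classes (grammar name + Lexer/Parser suffix) and
+// the package declared in @header below.
+//
+//   import org.antlr.runtime.ANTLRStringStream;
+//   import org.antlr.runtime.CommonTokenStream;
+//   import org.antlr.runtime.RecognitionException;
+//   import org.antlr.runtime.tree.CommonTree;
+//
+//   ANTLRStringStream input = new ANTLRStringStream("title:(+lucene +antlr)");
+//   StandardLuceneGrammarLexer lexer = new StandardLuceneGrammarLexer(input);
+//   CommonTokenStream tokens = new CommonTokenStream(lexer);
+//   StandardLuceneGrammarParser parser = new StandardLuceneGrammarParser(tokens);
+//   StandardLuceneGrammarParser.mainQ_return result = parser.mainQ(); // throws RecognitionException
+//   CommonTree ast = (CommonTree) result.getTree(); // the rewritten AST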
+// +// One last note - if you want to implement your own error recovery, have a look +// at the generated java class +// +// oal.queryparser.flixible.aqp.parser.SyntaxParser.java +// +// There we are raising parse exception as well +// + + +options { + language = Java; + output = AST; + superClass = UnforgivingParser; +} + +tokens { + OPERATOR; + ATOM; + MODIFIER; + TMODIFIER; + CLAUSE; + FIELD; + FUZZY; + BOOST; + QNORMAL; + QPHRASE; + QPHRASETRUNC; + QTRUNCATED; + QRANGEIN; + QRANGEEX; + QANYTHING; + QDATE; +} + + +// java-specific and error recovery-unfriendly details.... + +@header{ + package org.apache.lucene.queryparser.flexible.aqp.parser; +} +@lexer::header { + package org.apache.lucene.queryparser.flexible.aqp.parser; +} + +// this is for exceptions on lexer level - we are preventing error +// recovery (ANTLRv3 does not seem to have a better way to modify +// the default behaviour - eg. from a parent abstract class) +@lexer::members { + public void recover(RecognitionException re) { + // throw unchecked exception + throw new RuntimeException(re); + } +} + +// ...below this point, language agnostic EBNF grammar lives. + + + + + +mainQ : + clauseOr+ EOF -> ^(OPERATOR["DEFOP"] clauseOr+) + ; + + +clauseOr + : (first=clauseAnd -> $first) (or others=clauseAnd -> ^(OPERATOR["OR"] clauseAnd+ ))* + ; + +clauseAnd + : (first=clauseNot -> $first) (and others=clauseNot -> ^(OPERATOR["AND"] clauseNot+ ))* + ; + +clauseNot + : (first=clauseBasic -> $first) (not others=clauseBasic -> ^(OPERATOR["NOT"] clauseBasic+) )* + ; + + +clauseBasic + : + (modifier LPAREN clauseOr+ RPAREN )=> modifier? LPAREN clauseOr+ RPAREN term_modifier? + -> ^(CLAUSE ^(MODIFIER modifier? ^(TMODIFIER term_modifier? ^(OPERATOR["DEFOP"] clauseOr+)))) // Default operator + | (LPAREN clauseOr+ RPAREN term_modifier)=> modifier? LPAREN clauseOr+ RPAREN term_modifier? + -> ^(CLAUSE ^(MODIFIER modifier? ^(TMODIFIER term_modifier? ^(OPERATOR["DEFOP"] clauseOr+)))) // Default operator + | (LPAREN )=> LPAREN clauseOr+ RPAREN + -> clauseOr+ + | atom + ; + + +atom + : + modifier? field multi_value term_modifier? + -> ^(CLAUSE ^(MODIFIER modifier? ^(TMODIFIER term_modifier? ^(FIELD field multi_value)))) + | modifier? field? value term_modifier? + -> ^(MODIFIER modifier? ^(TMODIFIER term_modifier? ^(FIELD field? value))) + ; + + +field + : + TERM_NORMAL COLON -> TERM_NORMAL + ; + +value + : + range_term_in -> ^(QRANGEIN range_term_in) + | range_term_ex -> ^(QRANGEEX range_term_ex) + | normal -> ^(QNORMAL normal) + | truncated -> ^(QTRUNCATED truncated) + | quoted -> ^(QPHRASE quoted) + | quoted_truncated -> ^(QPHRASETRUNC quoted_truncated) + | QMARK -> ^(QTRUNCATED QMARK) + | STAR COLON b=STAR -> ^(QANYTHING $b) + | STAR -> ^(QTRUNCATED STAR) + ; + + + +range_term_in + : + LBRACK + (a=range_value -> range_value ^(QANYTHING QANYTHING["*"])) + ( TO? b=range_value -> $a $b? )? + RBRACK + ; + + +range_term_ex + : + LCURLY + ( a=range_value -> range_value ^(QANYTHING QANYTHING["*"])) + ( TO? b=range_value -> $a $b? )? 
+ RCURLY + ; + +range_value + : + truncated -> ^(QTRUNCATED truncated) + | quoted -> ^(QPHRASE quoted) + | quoted_truncated -> ^(QPHRASETRUNC quoted_truncated) + | date -> ^(QNORMAL date) + | normal -> ^(QNORMAL normal) + | STAR -> ^(QANYTHING STAR) + ; + +multi_value + : + LPAREN multiClause RPAREN -> multiClause + ; + + + +multiClause + : + + //m:(a b AND c OR d OR e) + + // without duplicating the rules (but it allows recursion) + clauseOr+ -> ^(OPERATOR["DEFOP"] clauseOr+) + + // allows only limited set of operations + //multiDefault + + // this is also working, but i want operator precedence + //multiClause: + //(mterm+ -> mterm+) + //(op=operator rhs=fclause -> ^(OPERATOR ^(OPERATOR["DEFOP"] $mclause) $rhs))? + //; + //flause:mclause; + ; + +multiDefault + : + multiOr+ -> ^(OPERATOR["DEFOP"] multiOr+) + ; + +multiOr + : + (first=multiAnd -> $first) (or others=multiAnd-> ^(OPERATOR["OR"] multiAnd+ ))* + ; + +multiAnd + : + (first=multiNot -> $first) (and others=multiNot -> ^(OPERATOR["AND"] multiNot+ ))* + ; + +multiNot + : + (first=multiBasic -> $first) (not others=multiBasic-> ^(not multiBasic+ ))* + ; + + + +multiBasic + : + mterm + ; + +mterm + : + modifier? value -> ^(MODIFIER modifier? value) + ; + + +normal + : + TERM_NORMAL + | NUMBER + ; + + + + +truncated + : + TERM_TRUNCATED + ; + + +quoted_truncated + : + PHRASE_ANYTHING + ; + +quoted : + PHRASE + ; + + + + +operator: ( + AND -> OPERATOR["AND"] + | OR -> OPERATOR["OR"] + | NOT -> OPERATOR["NOT"] + ); + +modifier: + PLUS -> PLUS["+"] + | MINUS -> MINUS["-"]; + + +/* +This terribly convoluted grammar is here because of weird AST rewrite rules +and because we need to allow for default value when TILDE is not followed by +anything + +This grammar has problem with following + : term^4~ 9999 + where 999 is another term, not a fuzzy value +*/ +term_modifier : + TILDE CARAT? -> ^(BOOST CARAT?) ^(FUZZY TILDE) + | CARAT TILDE? -> ^(BOOST CARAT) ^(FUZZY TILDE?) +/* + // first alternative + ( + (CARAT b=NUMBER -> ^(BOOST $b) ^(FUZZY ) + ) + ( //syntactic predicate + (TILDE NUMBER )=>TILDE f=NUMBER -> ^(BOOST $b) ^(FUZZY $f) + | TILDE -> ^(BOOST $b) ^(FUZZY NUMBER["0.5"]) + )* // set the default value + + ) + // second alternative [only ~ | ~NUMBER] + | + (TILDE -> ^(BOOST) ^(FUZZY NUMBER["0.5"])) // set the default value + ((~(WS|TILDE|CARAT))=>f=NUMBER -> ^(BOOST) ^(FUZZY $f?) )* //replace the default but '~' must not be followed by WS +*/ + ; + + +boost : + (CARAT -> ^(BOOST NUMBER["DEF"])) // set the default value + (NUMBER -> ^(BOOST NUMBER))? //replace the default with user input + ; + +fuzzy : + (TILDE -> ^(FUZZY NUMBER["DEF"])) // set the default value + (NUMBER -> ^(FUZZY NUMBER))? //replace the default with user input + ; + +not : + (AND NOT)=> AND NOT + | NOT + ; + +and : + AND + ; + +or : + OR + ; + + +date : + //a=NUMBER '/' b=NUMBER '/' c=NUMBER -> ^(QDATE $a $b $c) + DATE_TOKEN + ; + +/* ================================================================ + * = LEXER = + * ================================================================ + */ + + + +LPAREN : '('; + +RPAREN : ')'; + +LBRACK : '['; + +RBRACK : ']'; + +COLON : ':' ; //this must NOT be fragment + +PLUS : '+' ; + +MINUS : ('-'|'\!'); + +STAR : '*' ; + +QMARK : '?'+ ; + +fragment VBAR : '|' ; + +fragment AMPER : '&' ; + +LCURLY : '{' ; + +RCURLY : '}' ; + +CARAT : '^' (INT+ ('.' INT+)?)?; + +TILDE : '~' (INT+ ('.' 
INT+)?)?; + +DQUOTE + : '\"'; + +SQUOTE + : '\''; + + + + +TO : 'TO'; + +/* We want to be case insensitive */ +AND : (('a' | 'A') ('n' | 'N') ('d' | 'D') | (AMPER AMPER?)) ; +OR : (('o' | 'O') ('r' | 'R') | (VBAR VBAR?)); +NOT : ('n' | 'N') ('o' | 'O') ('t' | 'T'); + + +WS : ( ' ' + | '\t' + | '\r' + | '\n' + | '\u3000' + ) + {$channel=HIDDEN;} + ; + + +/* +fragment TERM_CHAR : + ~(' ' | '\t' | '\n' | '\r' | '\u3000' + | '\\' | '\'' | '\"' + | '(' | ')' | '[' | ']' | '{' | '}' + | '+' | '-' | '!' | ':' | '~' | '^' + | '*' | '|' | '&' | '?' | '\\\"' | '/' //this line is not present in lucene StandardParser.jj + ); +*/ + + +fragment INT: '0' .. '9'; + + +fragment ESC_CHAR: '\\' .; + + +fragment TERM_START_CHAR + : + (~(' ' | '\t' | '\n' | '\r' | '\u3000' + | '\'' | '\"' + | '(' | ')' | '[' | ']' | '{' | '}' + | '+' | '-' | '!' | ':' | '~' | '^' + | '?' | '*' | '\\' + ) + | ESC_CHAR ); + + +fragment TERM_CHAR + : + (TERM_START_CHAR | '-' | '+') + ; + + +NUMBER + : + INT+ ('.' INT+)? + ; + +DATE_TOKEN + : + INT INT? ('/'|'-'|'.') INT INT? ('/'|'-'|'.') INT INT (INT INT)? + ; + +TERM_NORMAL + : + TERM_START_CHAR ( TERM_CHAR )* + ; + + +TERM_TRUNCATED: + (STAR|QMARK) (TERM_CHAR+ (QMARK|STAR))+ (TERM_CHAR)* + | TERM_START_CHAR (TERM_CHAR* (QMARK|STAR))+ (TERM_CHAR)* + | (STAR|QMARK) TERM_CHAR+ + ; + + +PHRASE + : + DQUOTE (ESC_CHAR|~('\"'|'\\'|'?'|'*'))+ DQUOTE + ; + +PHRASE_ANYTHING : + DQUOTE (ESC_CHAR|~('\"'|'\\'))+ DQUOTE + ; + Index: lucene/queryparser/grammars/StandardLuceneGrammar.xls =================================================================== --- lucene/queryparser/grammars/StandardLuceneGrammar.xls (revision 0) +++ lucene/queryparser/grammars/StandardLuceneGrammar.xls (revision 0) @@ -0,0 +1,1921 @@ +ࡱ;  +  !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~Root Entry  \pCalc Ba==@ 8I@"1Arial1Arial1Arial1Arial GENERAL                + ) , *   `DSheet1hSheet2TbZ 3  @@   o<mainQ:0"title:\"X x\" AND text:go title:\"x y\" AND A" (DEFOP (AND (MODIFIER (TMODIFIER (FIELD title (QPHRASE \"X x\")))) (MODIFIER (TMODIFIER (FIELD text (QNORMAL go))))) (AND (MODIFIER (TMODIFIER (FIELD title (QPHRASE \"x y\")))) (MODIFIER (TMODIFIER (FIELD (QNORMAL A))))))"title:X Y Z" (DEFOP (MODIFIER (TMODIFIER (FIELD title (QNORMAL X)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL Y)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL Z)))))"jakarta^4 apache" y(DEFOP (MODIFIER (TMODIFIER (BOOST ^4) FUZZY (FIELD (QNORMAL jakarta)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL apache))))))"\"jakarta apache\"^4 \"Apache Lucene\"" (DEFOP (MODIFIER (TMODIFIER (BOOST ^4) FUZZY (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QPHRASE \"Apache Lucene\"))))) "\"jakarta apache\" OR jakarta" y(DEFOP (OR (MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QNORMAL jakarta))))))+"\"jakarta apache\" AND \"Apache Lucene\"" (DEFOP (AND (MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QPHRASE \"Apache Lucene\"))))))"+jakarta lucene" j(DEFOP (MODIFIER + (TMODIFIER (FIELD (QNORMAL jakarta)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL lucene)))))+"\"jakarta apache\" NOT \"Apache Lucene\"" (DEFOP (NOT (MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QPHRASE \"Apache Lucene\"))))))""(jakarta OR apache) AND website" (DEFOP (AND (OR (MODIFIER (TMODIFIER (FIELD (QNORMAL jakarta)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL apache))))) (MODIFIER (TMODIFIER (FIELD (QNORMAL website))))))"this 
[Remainder of the binary spreadsheet omitted: StandardLuceneGrammar.xls is an OpenOffice/Excel file that cannot be reproduced as text. It holds the gunit test suite for this grammar ("gunit StandardLuceneGrammar", package org.apache.lucene.queryparser.flexible.aqp.parser); each row pairs an input query with its expected AST, or with OK/FAIL, for example "title:X Y Z" -> (DEFOP (MODIFIER (TMODIFIER (FIELD title (QNORMAL X)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL Y)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL Z))))).]
ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B  +   ]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B] GZ  : ,Z  :  +"B xD `Z FAIL Z OK B%0D Bt:  ->"" +B D Bt: {Z FAIL Z OK B% : DD Bs"Ao "AoZ "AoBP +""B] +GZ + : +,Z + : + +"B +xD +`Z +FAIL Z +OK B%0D +Bt:  ->"" +B +D +Bt: {Z +FAIL Z +OK B% : +DD +Bs"Ao "AoZ +"AoBP +""B] GZ  : ,Z  :  +"B xD `Z FAIL Z OK B%0D Bt:  ->"" +B D Bt: {Z FAIL Z OK B% : DD Bs"Ao "AoZ "AoBP +""B] GZ  : ,Z  :  +"B xD `Z FAIL Z OK B%0D Bt:  ->"" +B D Bt: {Z FAIL Z OK B% : DD Bs"Ao "AoZ "AoBP +""B] GZ  : ,Z  :  +"B xD `Z FAIL Z OK B%0D Bt:  ->"" +B D Bt: {Z FAIL Z OK B% : DD Bs"Ao "AoZ "AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL 
ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B !"#$%&'()*+,-./0123456789:;<=>?] GZ  : ,Z  :  +"B xD `Z FAIL Z OK B%0D Bt:  ->"" +B D Bt: {Z FAIL Z OK B% : DD Bs"Ao "AoZ "AoBP +""B]!GZ! :!,Z! :! +"B!xD!`Z!FAIL Z!OK B%0D!Bt:  ->"" +B!D!Bt: {Z!FAIL Z!OK B% :!DD!Bs"Ao "AoZ!"AoBP +""B]"GZ" :",Z" :" +"B"xD"`Z"FAIL Z"OK B%0D"Bt:  ->"" +B"D"Bt: {Z"FAIL Z"OK B% :"DD"Bs"Ao "AoZ""AoBP +""B]#GZ# :#,Z# :# +"B#xD#`Z#FAIL Z#OK B%0D#Bt:  ->"" +B#D#Bt: {Z#FAIL Z#OK B% :#DD#Bs"Ao "AoZ#"AoBP +""B]$GZ$ :$,Z$ :$ +"B$xD$`Z$FAIL Z$OK B%0D$Bt:  ->"" +B$D$Bt: {Z$FAIL Z$OK B% :$DD$Bs"Ao "AoZ$"AoBP +""B]%GZ% :%,Z% :% +"B%xD%`Z%FAIL Z%OK B%0D%Bt:  ->"" +B%D%Bt: {Z%FAIL Z%OK B% :%DD%Bs"Ao "AoZ%"AoBP +""B]&GZ& :&,Z& :& +"B&xD&`Z&FAIL Z&OK B%0D&Bt:  ->"" +B&D&Bt: {Z&FAIL Z&OK B% :&DD&Bs"Ao "AoZ&"AoBP +""B]'GZ' :',Z' :' +"B'xD'`Z'FAIL Z'OK B%0D'Bt:  ->"" +B'D'Bt: {Z'FAIL Z'OK B% :'DD'Bs"Ao "AoZ'"AoBP +""B](GZ( :(,Z( :( +"B(xD(`Z(FAIL Z(OK B%0D(Bt:  ->"" +B(D(Bt: {Z(FAIL Z(OK B% :(DD(Bs"Ao "AoZ("AoBP +""B])GZ) :),Z) :) +"B)xD)`Z)FAIL Z)OK B%0D)Bt:  ->"" +B)D)Bt: {Z)FAIL Z)OK B% :)DD)Bs"Ao "AoZ)"AoBP +""B]*GZ* :*,Z* :* +"B*xD*`Z*FAIL Z*OK B%0D*Bt:  ->"" +B*D*Bt: {Z*FAIL Z*OK B% :*DD*Bs"Ao "AoZ*"AoBP +""B]+GZ+ :+,Z+ :+ +"B+xD+`Z+FAIL Z+OK B%0D+Bt:  ->"" +B+D+Bt: {Z+FAIL Z+OK B% :+DD+Bs"Ao "AoZ+"AoBP +""B],GZ, :,,Z, :, +"B,xD,`Z,FAIL Z,OK B%0D,Bt:  ->"" +B,D,Bt: {Z,FAIL Z,OK B% :,DD,Bs"Ao "AoZ,"AoBP +""B]-GZ- :-,Z- :- +"B-xD-`Z-FAIL Z-OK B%0D-Bt:  ->"" +B-D-Bt: {Z-FAIL Z-OK B% :-DD-Bs"Ao "AoZ-"AoBP +""B].GZ. :.,Z. :. 
+"B.xD.`Z.FAIL Z.OK B%0D.Bt:  ->"" +B.D.Bt: {Z.FAIL Z.OK B% :.DD.Bs"Ao "AoZ."AoBP +""B]/GZ/ :/,Z/ :/ +"B/xD/`Z/FAIL Z/OK B%0D/Bt:  ->"" +B/D/Bt: {Z/FAIL Z/OK B% :/DD/Bs"Ao "AoZ/"AoBP +""B]0GZ0 :0,Z0 :0 +"B0xD0`Z0FAIL Z0OK B%0D0Bt:  ->"" +B0D0Bt: {Z0FAIL Z0OK B% :0DD0Bs"Ao "AoZ0"AoBP +""B]1GZ1 :1,Z1 :1 +"B1xD1`Z1FAIL Z1OK B%0D1Bt:  ->"" +B1D1Bt: {Z1FAIL Z1OK B% :1DD1Bs"Ao "AoZ1"AoBP +""B]2GZ2 :2,Z2 :2 +"B2xD2`Z2FAIL Z2OK B%0D2Bt:  ->"" +B2D2Bt: {Z2FAIL Z2OK B% :2DD2Bs"Ao "AoZ2"AoBP +""B]3GZ3 :3,Z3 :3 +"B3xD3`Z3FAIL Z3OK B%0D3Bt:  ->"" +B3D3Bt: {Z3FAIL Z3OK B% :3DD3Bs"Ao "AoZ3"AoBP +""B]4GZ4 :4,Z4 :4 +"B4xD4`Z4FAIL Z4OK B%0D4Bt:  ->"" +B4D4Bt: {Z4FAIL Z4OK B% :4DD4Bs"Ao "AoZ4"AoBP +""B]5GZ5 :5,Z5 :5 +"B5xD5`Z5FAIL Z5OK B%0D5Bt:  ->"" +B5D5Bt: {Z5FAIL Z5OK B% :5DD5Bs"Ao "AoZ5"AoBP +""B]6GZ6 :6,Z6 :6 +"B6xD6`Z6FAIL Z6OK B%0D6Bt:  ->"" +B6D6Bt: {Z6FAIL Z6OK B% :6DD6Bs"Ao "AoZ6"AoBP +""B]7GZ7 :7,Z7 :7 +"B7xD7`Z7FAIL Z7OK B%0D7Bt:  ->"" +B7D7Bt: {Z7FAIL Z7OK B% :7DD7Bs"Ao "AoZ7"AoBP +""B]8GZ8 :8,Z8 :8 +"B8xD8`Z8FAIL Z8OK B%0D8Bt:  ->"" +B8D8Bt: {Z8FAIL Z8OK B% :8DD8Bs"Ao "AoZ8"AoBP +""B]9GZ9 :9,Z9 :9 +"B9xD9`Z9FAIL Z9OK B%0D9Bt:  ->"" +B9D9Bt: {Z9FAIL Z9OK B% :9DD9Bs"Ao "AoZ9"AoBP +""B]:GZ: ::,Z: :: +"B:xD:`Z:FAIL Z:OK B%0D:Bt:  ->"" +B:D:Bt: {Z:FAIL Z:OK B% ::DD:Bs"Ao "AoZ:"AoBP +""B];GZ; :;,Z; :; +"B;xD;`Z;FAIL Z;OK B%0D;Bt:  ->"" +B;D;Bt: {Z;FAIL Z;OK B% :;DD;Bs"Ao "AoZ;"AoBP +""B]<GZ< :<,Z< :< +"B<xD<`Z<FAIL Z<OK B%0D<Bt:  ->"" +B<D<Bt: {Z<FAIL Z<OK B% :<DD<Bs"Ao "AoZ<"AoBP +""B]=GZ= :=,Z= := +"B=xD=`Z=FAIL Z=OK B%0D=Bt:  ->"" +B=D=Bt: {Z=FAIL Z=OK B% :=DD=Bs"Ao "AoZ="AoBP +""B]>GZ> :>,Z> :> +"B>xD>`Z>FAIL Z>OK B%0D>Bt:  ->"" +B>D>Bt: {Z>FAIL Z>OK B% :>DD>Bs"Ao "AoZ>"AoBP +""B]?GZ? :?,Z? :? +"B?xD?`Z?FAIL Z?OK B%0D?Bt:  ->"" +B?D?Bt: {Z?FAIL Z?OK B% :?DD?Bs"Ao "AoZ?"AoBP +""B@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_]@GZ@ :@,Z@ :@ +"B@xD@`Z@FAIL Z@OK B%0D@Bt:  ->"" +B@D@Bt: {Z@FAIL Z@OK B% :@DD@Bs"Ao "AoZ@"AoBP +""B]AGZA :A,ZA :A +"BAxDA`ZAFAIL ZAOK B%0DABt:  ->"" +BADABt: {ZAFAIL ZAOK B% :ADDABs"Ao "AoZA"AoBP +""B]BGZB :B,ZB :B +"BBxDB`ZBFAIL ZBOK B%0DBBt:  ->"" +BBDBBt: {ZBFAIL ZBOK B% :BDDBBs"Ao "AoZB"AoBP +""B]CGZC :C,ZC :C +"BCxDC`ZCFAIL ZCOK B%0DCBt:  ->"" +BCDCBt: {ZCFAIL ZCOK B% :CDDCBs"Ao "AoZC"AoBP +""B]DGZD :D,ZD :D +"BDxDD`ZDFAIL ZDOK B%0DDBt:  ->"" +BDDDBt: {ZDFAIL ZDOK B% :DDDDBs"Ao "AoZD"AoBP +""B]EGZE :E,ZE :E +"BExDE`ZEFAIL ZEOK B%0DEBt:  ->"" +BEDEBt: {ZEFAIL ZEOK B% :EDDEBs"Ao "AoZE"AoBP +""B]FGZF :F,ZF :F +"BFxDF`ZFFAIL ZFOK B%0DFBt:  ->"" +BFDFBt: {ZFFAIL ZFOK B% :FDDFBs"Ao "AoZF"AoBP +""B]GGZG :G,ZG :G +"BGxDG`ZGFAIL ZGOK B%0DGBt:  ->"" +BGDGBt: {ZGFAIL ZGOK B% :GDDGBs"Ao "AoZG"AoBP +""B]HGZH :H,ZH :H +"BHxDH`ZHFAIL ZHOK B%0DHBt:  ->"" +BHDHBt: {ZHFAIL ZHOK B% :HDDHBs"Ao "AoZH"AoBP +""B]IGZI :I,ZI :I +"BIxDI`ZIFAIL ZIOK B%0DIBt:  ->"" +BIDIBt: {ZIFAIL ZIOK B% :IDDIBs"Ao "AoZI"AoBP +""B]JGZJ :J,ZJ :J +"BJxDJ`ZJFAIL ZJOK B%0DJBt:  ->"" +BJDJBt: {ZJFAIL ZJOK B% :JDDJBs"Ao "AoZJ"AoBP +""B]KGZK :K,ZK :K +"BKxDK`ZKFAIL ZKOK B%0DKBt:  ->"" +BKDKBt: {ZKFAIL ZKOK B% :KDDKBs"Ao "AoZK"AoBP +""B]LGZL :L,ZL :L +"BLxDL`ZLFAIL ZLOK B%0DLBt:  ->"" +BLDLBt: {ZLFAIL ZLOK B% :LDDLBs"Ao "AoZL"AoBP +""B]MGZM :M,ZM :M +"BMxDM`ZMFAIL ZMOK B%0DMBt:  ->"" +BMDMBt: {ZMFAIL ZMOK B% :MDDMBs"Ao "AoZM"AoBP +""B]NGZN :N,ZN :N +"BNxDN`ZNFAIL ZNOK B%0DNBt:  ->"" +BNDNBt: {ZNFAIL ZNOK B% :NDDNBs"Ao "AoZN"AoBP +""B]OGZO :O,ZO :O +"BOxDO`ZOFAIL ZOOK B%0DOBt:  ->"" +BODOBt: {ZOFAIL ZOOK B% :ODDOBs"Ao "AoZO"AoBP +""B]PGZP :P,ZP :P +"BPxDP`ZPFAIL ZPOK B%0DPBt:  ->"" +BPDPBt: {ZPFAIL ZPOK B% :PDDPBs"Ao "AoZP"AoBP 
+""B]QGZQ :Q,ZQ :Q +"BQxDQ`ZQFAIL ZQOK B%0DQBt:  ->"" +BQDQBt: {ZQFAIL ZQOK B% :QDDQBs"Ao "AoZQ"AoBP +""B]RGZR :R,ZR :R +"BRxDR`ZRFAIL ZROK B%0DRBt:  ->"" +BRDRBt: {ZRFAIL ZROK B% :RDDRBs"Ao "AoZR"AoBP +""B]SGZS :S,ZS :S +"BSxDS`ZSFAIL ZSOK B%0DSBt:  ->"" +BSDSBt: {ZSFAIL ZSOK B% :SDDSBs"Ao "AoZS"AoBP +""B]TGZT :T,ZT :T +"BTxDT`ZTFAIL ZTOK B%0DTBt:  ->"" +BTDTBt: {ZTFAIL ZTOK B% :TDDTBs"Ao "AoZT"AoBP +""B]UGZU :U,ZU :U +"BUxDU`ZUFAIL ZUOK B%0DUBt:  ->"" +BUDUBt: {ZUFAIL ZUOK B% :UDDUBs"Ao "AoZU"AoBP +""B]VGZV :V,ZV :V +"BVxDV`ZVFAIL ZVOK B%0DVBt:  ->"" +BVDVBt: {ZVFAIL ZVOK B% :VDDVBs"Ao "AoZV"AoBP +""B]WGZW :W,ZW :W +"BWxDW`ZWFAIL ZWOK B%0DWBt:  ->"" +BWDWBt: {ZWFAIL ZWOK B% :WDDWBs"Ao "AoZW"AoBP +""B]XGZX :X,ZX :X +"BXxDX`ZXFAIL ZXOK B%0DXBt:  ->"" +BXDXBt: {ZXFAIL ZXOK B% :XDDXBs"Ao "AoZX"AoBP +""B]YGZY :Y,ZY :Y +"BYxDY`ZYFAIL ZYOK B%0DYBt:  ->"" +BYDYBt: {ZYFAIL ZYOK B% :YDDYBs"Ao "AoZY"AoBP +""B]ZGZZ :Z,ZZ :Z +"BZxDZ`ZZFAIL ZZOK B%0DZBt:  ->"" +BZDZBt: {ZZFAIL ZZOK B% :ZDDZBs"Ao "AoZZ"AoBP +""B][GZ[ :[,Z[ :[ +"B[xD[`Z[FAIL Z[OK B%0D[Bt:  ->"" +B[D[Bt: {Z[FAIL Z[OK B% :[DD[Bs"Ao "AoZ["AoBP +""B]\GZ\ :\,Z\ :\ +"B\xD\`Z\FAIL Z\OK B%0D\Bt:  ->"" +B\D\Bt: {Z\FAIL Z\OK B% :\DD\Bs"Ao "AoZ\"AoBP +""B]]GZ] :],Z] :] +"B]xD]`Z]FAIL Z]OK B%0D]Bt:  ->"" +B]D]Bt: {Z]FAIL Z]OK B% :]DD]Bs"Ao "AoZ]"AoBP +""B]^GZ^ :^,Z^ :^ +"B^xD^`Z^FAIL Z^OK B%0D^Bt:  ->"" +B^D^Bt: {Z^FAIL Z^OK B% :^DD^Bs"Ao "AoZ^"AoBP +""B]_GZ_ :_,Z_ :_ +"B_xD_`Z_FAIL Z_OK B%0D_Bt:  ->"" +B_D_Bt: {Z_FAIL Z_OK B% :_DD_Bs"Ao "AoZ_"AoBP +""B`abcdefghijklmnopqrstuvwxyz{|}~]`GZ` :`,Z` :` +"B`xD``Z`FAIL Z`OK B%0D`Bt:  ->"" +B`D`Bt: {Z`FAIL Z`OK B% :`DD`Bs"Ao "AoZ`"AoBP +""B]aGZa :a,Za :a +"BaxDa`ZaFAIL ZaOK B%0DaBt:  ->"" +BaDaBt: {ZaFAIL ZaOK B% :aDDaBs"Ao "AoZa"AoBP +""B]bGZb :b,Zb :b +"BbxDb`ZbFAIL ZbOK B%0DbBt:  ->"" +BbDbBt: {ZbFAIL ZbOK B% :bDDbBs"Ao "AoZb"AoBP +""B]cGZc :c,Zc :c +"BcxDc`ZcFAIL ZcOK B%0DcBt:  ->"" +BcDcBt: {ZcFAIL ZcOK B% :cDDcBs"Ao "AoZc"AoBP +""B]dGZd :d,Zd :d +"BdxDd`ZdFAIL ZdOK B%0DdBt:  ->"" +BdDdBt: {ZdFAIL ZdOK B% :dDDdBs"Ao "AoZd"AoBP +""B]eGZe :e,Ze :e +"BexDe`ZeFAIL ZeOK B%0DeBt:  ->"" +BeDeBt: {ZeFAIL ZeOK B% :eDDeBs"Ao "AoZe"AoBP +""B]fGZf :f,Zf :f +"BfxDf`ZfFAIL ZfOK B%0DfBt:  ->"" +BfDfBt: {ZfFAIL ZfOK B% :fDDfBs"Ao "AoZf"AoBP +""B]gGZg :g,Zg :g +"BgxDg`ZgFAIL ZgOK B%0DgBt:  ->"" +BgDgBt: {ZgFAIL ZgOK B% :gDDgBs"Ao "AoZg"AoBP +""B]hGZh :h,Zh :h +"BhxDh`ZhFAIL ZhOK B%0DhBt:  ->"" +BhDhBt: {ZhFAIL ZhOK B% :hDDhBs"Ao "AoZh"AoBP +""B]iGZi :i,Zi :i +"BixDi`ZiFAIL ZiOK B%0DiBt:  ->"" +BiDiBt: {ZiFAIL ZiOK B% :iDDiBs"Ao "AoZi"AoBP +""B]jGZj :j,Zj :j +"BjxDj`ZjFAIL ZjOK B%0DjBt:  ->"" +BjDjBt: {ZjFAIL ZjOK B% :jDDjBs"Ao "AoZj"AoBP +""B]kGZk :k,Zk :k +"BkxDk`ZkFAIL ZkOK B%0DkBt:  ->"" +BkDkBt: {ZkFAIL ZkOK B% :kDDkBs"Ao "AoZk"AoBP +""B]lGZl :l,Zl :l +"BlxDl`ZlFAIL ZlOK B%0DlBt:  ->"" +BlDlBt: {ZlFAIL ZlOK B% :lDDlBs"Ao "AoZl"AoBP +""B]mGZm :m,Zm :m +"BmxDm`ZmFAIL ZmOK B%0DmBt:  ->"" +BmDmBt: {ZmFAIL ZmOK B% :mDDmBs"Ao "AoZm"AoBP +""B]nGZn :n,Zn :n +"BnxDn`ZnFAIL ZnOK B%0DnBt:  ->"" +BnDnBt: {ZnFAIL ZnOK B% :nDDnBs"Ao "AoZn"AoBP +""B]oGZo :o,Zo :o +"BoxDo`ZoFAIL ZoOK B%0DoBt:  ->"" +BoDoBt: {ZoFAIL ZoOK B% :oDDoBs"Ao "AoZo"AoBP +""B]pGZp :p,Zp :p +"BpxDp`ZpFAIL ZpOK B%0DpBt:  ->"" +BpDpBt: {ZpFAIL ZpOK B% :pDDpBs"Ao "AoZp"AoBP +""B]qGZq :q,Zq :q +"BqxDq`ZqFAIL ZqOK B%0DqBt:  ->"" +BqDqBt: {ZqFAIL ZqOK B% :qDDqBs"Ao "AoZq"AoBP +""B]rGZr :r,Zr :r +"BrxDr`ZrFAIL ZrOK B%0DrBt:  ->"" +BrDrBt: {ZrFAIL ZrOK B% :rDDrBs"Ao "AoZr"AoBP +""B]sGZs :s,Zs :s +"BsxDs`ZsFAIL ZsOK B%0DsBt:  ->"" +BsDsBt: {ZsFAIL ZsOK B% :sDDsBs"Ao 
"AoZs"AoBP +""B]tGZt :t,Zt :t +"BtxDt`ZtFAIL ZtOK B%0DtBt:  ->"" +BtDtBt: {ZtFAIL ZtOK B% :tDDtBs"Ao "AoZt"AoBP +""B]uGZu :u,Zu :u +"BuxDu`ZuFAIL ZuOK B%0DuBt:  ->"" +BuDuBt: {ZuFAIL ZuOK B% :uDDuBs"Ao "AoZu"AoBP +""B]vGZv :v,Zv :v +"BvxDv`ZvFAIL ZvOK B%0DvBt:  ->"" +BvDvBt: {ZvFAIL ZvOK B% :vDDvBs"Ao "AoZv"AoBP +""B]wGZw :w,Zw :w +"BwxDw`ZwFAIL ZwOK B%0DwBt:  ->"" +BwDwBt: {ZwFAIL ZwOK B% :wDDwBs"Ao "AoZw"AoBP +""B]xGZx :x,Zx :x +"BxxDx`ZxFAIL ZxOK B%0DxBt:  ->"" +BxDxBt: {ZxFAIL ZxOK B% :xDDxBs"Ao "AoZx"AoBP +""B]yGZy :y,Zy :y +"ByxDy`ZyFAIL ZyOK B%0DyBt:  ->"" +ByDyBt: {ZyFAIL ZyOK B% :yDDyBs"Ao "AoZy"AoBP +""B]zGZz :z,Zz :z +"BzxDz`ZzFAIL ZzOK B%0DzBt:  ->"" +BzDzBt: {ZzFAIL ZzOK B% :zDDzBs"Ao "AoZz"AoBP +""B]{GZ{ :{,Z{ :{ +"B{xD{`Z{FAIL Z{OK B%0D{Bt:  ->"" +B{D{Bt: {Z{FAIL Z{OK B% :{DD{Bs"Ao "AoZ{"AoBP +""B]|GZ| :|,Z| :| +"B|xD|`Z|FAIL Z|OK B%0D|Bt:  ->"" +B|D|Bt: {Z|FAIL Z|OK B% :|DD|Bs"Ao "AoZ|"AoBP +""B]}GZ} :},Z} :} +"B}xD}`Z}FAIL Z}OK B%0D}Bt:  ->"" +B}D}Bt: {Z}FAIL Z}OK B% :}DD}Bs"Ao "AoZ}"AoBP +""B]~GZ~ :~,Z~ :~ +"B~xD~`Z~FAIL Z~OK B%0D~Bt:  ->"" +B~D~Bt: {Z~FAIL Z~OK B% :~DD~Bs"Ao "AoZ~"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao 
"AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : 
+"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  
->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""B]GZ :,Z : +"BxD`ZFAIL ZOK B%0DBt:  ->"" +BDBt: {ZFAIL ZOK B% :DDBs"Ao "AoZ"AoBP +""BPH 0(  +>@ +gg + +FMicrosoft Excel 97-TabelleBiff8Oh+'0@H X +d p | rca 18@@@@9)s՜.+,D՜.+,\Root EntryF@WorkbookCompObjIOle +SummaryInformation(DocumentSummaryInformation8t \ No newline at end of file Property changes on: lucene/queryparser/grammars/StandardLuceneGrammar.xls ___________________________________________________________________ Added: svn:mime-type + application/octet-stream Index: lucene/queryparser/grammars/StandardLuceneGrammar.gunit =================================================================== --- lucene/queryparser/grammars/StandardLuceneGrammar.gunit (revision 0) +++ lucene/queryparser/grammars/StandardLuceneGrammar.gunit (revision 0) @@ -0,0 +1,503 @@ +gunit StandardLuceneGrammar; +@header { +package org.apache.lucene.queryparser.flexible.aqp.parser; +} + + +mainQ: +"title:\"X x\" AND text:go title:\"x y\" AND A" -> "(DEFOP (AND (MODIFIER (TMODIFIER (FIELD title (QPHRASE \"X x\")))) (MODIFIER (TMODIFIER (FIELD text (QNORMAL go))))) (AND (MODIFIER (TMODIFIER (FIELD title (QPHRASE \"x y\")))) (MODIFIER (TMODIFIER (FIELD (QNORMAL A))))))" +"title:X Y Z" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD title (QNORMAL X)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL Y)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL Z)))))" +"jakarta^4 apache" -> "(DEFOP (MODIFIER (TMODIFIER (BOOST ^4) FUZZY (FIELD (QNORMAL jakarta)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL apache)))))" +"\"jakarta apache\"^4 \"Apache Lucene\"" -> "(DEFOP (MODIFIER (TMODIFIER (BOOST ^4) FUZZY (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QPHRASE \"Apache Lucene\")))))" +"\"jakarta apache\" OR jakarta" -> "(DEFOP (OR (MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QNORMAL jakarta))))))" +"\"jakarta apache\" AND \"Apache Lucene\"" -> "(DEFOP (AND (MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QPHRASE \"Apache Lucene\"))))))" +"+jakarta lucene" -> "(DEFOP (MODIFIER + (TMODIFIER (FIELD (QNORMAL jakarta)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL lucene)))))" +"\"jakarta apache\" NOT \"Apache Lucene\"" -> "(DEFOP (NOT (MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QPHRASE \"Apache Lucene\"))))))" +"(jakarta OR apache) AND website" -> "(DEFOP (AND (OR (MODIFIER (TMODIFIER (FIELD (QNORMAL jakarta)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL apache))))) (MODIFIER (TMODIFIER (FIELD (QNORMAL website))))))" +"this (that)" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))" +"this ((that))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))" +"(this) ((((((that))))))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))" +"(this) (that)" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))" +"this +(that)" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL 
this)))) (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))" +"this ((((+(that)))))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))" +"this (+(((+(that)))))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))))))" +"this +((((+(that)))))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))))))" +"this +(+((((that)))))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))))))" +"this (+(that)^7)" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (BOOST ^7) FUZZY (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))" +"this (+(((+(that))))" FAIL +"this (++(((+(that)))))" FAIL +"this +(that thus)^7" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (BOOST ^7) FUZZY (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL thus)))))))))" +"this (+(((+(that))))" FAIL +"this (++(((+(that)))))" FAIL + + + + + + + +atom: +"te?t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED te?t))))" +"test*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED test*))))" +"te*t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED te*t))))" +"te*t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED te*t))))" +"*te*t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *te*t))))" +"*te*t*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *te*t*))))" +"?te*t?" 
-> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED ?te*t?))))" +"te?t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED te?t))))" +"te??t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED te??t))))" +"te*?t" OK +"\"text\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"text\"))))" +"\"te*t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te*t\"))))" +"\"test*\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"test*\"))))" +"\"te*t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te*t\"))))" +"\"te*t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te*t\"))))" +"\"*te*t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"*te*t\"))))" +"\"*te*t*\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"*te*t*\"))))" +"\"?te*t?\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"?te*t?\"))))" +"\"te?t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te?t\"))))" +"\"te??t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te??t\"))))" +"\"te*?t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te*?t\"))))" +"roam~" -> "(MODIFIER (TMODIFIER BOOST (FUZZY ~) (FIELD (QNORMAL roam))))" +"roam~0.8" -> "(MODIFIER (TMODIFIER BOOST (FUZZY ~0.8) (FIELD (QNORMAL roam))))" +"roam~0.899999999" -> "(MODIFIER (TMODIFIER BOOST (FUZZY ~0.899999999) (FIELD (QNORMAL roam))))" +"roam~8" -> "(MODIFIER (TMODIFIER BOOST (FUZZY ~8) (FIELD (QNORMAL roam))))" +"roam^" -> "(MODIFIER (TMODIFIER (BOOST ^) FUZZY (FIELD (QNORMAL roam))))" +"roam^0.8" -> "(MODIFIER (TMODIFIER (BOOST ^0.8) FUZZY (FIELD (QNORMAL roam))))" +"roam^0.899999999" -> "(MODIFIER (TMODIFIER (BOOST ^0.899999999) FUZZY (FIELD (QNORMAL roam))))" +"roam^8" -> "(MODIFIER (TMODIFIER (BOOST ^8) FUZZY (FIELD (QNORMAL roam))))" +"roam^~" -> "(MODIFIER (TMODIFIER (BOOST ^) (FUZZY ~) (FIELD (QNORMAL roam))))" +"roam^0.8~" -> "(MODIFIER (TMODIFIER (BOOST ^0.8) (FUZZY ~) (FIELD (QNORMAL roam))))" +"roam^0.899999999~0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.899999999) (FUZZY ~0.5) (FIELD (QNORMAL roam))))" +"roam~^" -> "(MODIFIER (TMODIFIER (BOOST ^) (FUZZY ~) (FIELD (QNORMAL roam))))" +"roam~0.8^" -> "(MODIFIER (TMODIFIER (BOOST ^) (FUZZY ~0.8) (FIELD (QNORMAL roam))))" +"roam~0.899999999^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~0.899999999) (FIELD (QNORMAL roam))))" +"this^ 5" -> "(MODIFIER (TMODIFIER (BOOST ^) FUZZY (FIELD (QNORMAL this))))" +"this^5~ 9" -> "(MODIFIER (TMODIFIER (BOOST ^5) (FUZZY ~) (FIELD (QNORMAL this))))" +"9999" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL 9999))))" +"9999.1" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL 9999.1))))" +"0.9999" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL 0.9999))))" +"00000000.9999" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL 00000000.9999))))" +"\"jakarta apache\"~10" -> "(MODIFIER (TMODIFIER BOOST (FUZZY ~10) (FIELD (QPHRASE \"jakarta apache\"))))" +"\"jakarta apache\"^10" -> "(MODIFIER (TMODIFIER (BOOST ^10) FUZZY (FIELD (QPHRASE \"jakarta apache\"))))" +"\"jakarta apache\"~10^" -> "(MODIFIER (TMODIFIER (BOOST ^) (FUZZY ~10) (FIELD (QPHRASE \"jakarta apache\"))))" +"\"jakarta apache\"^10~" -> "(MODIFIER (TMODIFIER (BOOST ^10) (FUZZY ~) (FIELD (QPHRASE \"jakarta apache\"))))" +"\"jakarta apache\"~10^0.6" -> "(MODIFIER (TMODIFIER (BOOST ^0.6) (FUZZY ~10) (FIELD (QPHRASE \"jakarta apache\"))))" +"\"jakarta apache\"^10~0.6" -> "(MODIFIER (TMODIFIER (BOOST ^10) (FUZZY ~0.6) (FIELD (QPHRASE \"jakarta apache\"))))" +"[20020101 TO 20030101]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"[20020101 TO 20030101]^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD (QRANGEIN (QNORMAL 20020101) (QNORMAL 
20030101)))))" +"[20020101 TO 20030101]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"[20020101 TO 20030101]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:[20020101 TO 20030101]" -> "(MODIFIER (TMODIFIER (FIELD title (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:[20020101 TO 20030101]^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD title (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:[20020101 TO 20030101]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD title (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:[20020101 TO 20030101]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD title (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"[* TO 20030101]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QANYTHING *) (QNORMAL 20030101)))))" +"[20020101 TO *]^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD (QRANGEIN (QNORMAL 20020101) (QANYTHING *)))))" +"[* 20030101]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEIN (QANYTHING *) (QNORMAL 20030101)))))" +"[20020101 *]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEIN (QNORMAL 20020101) (QANYTHING *)))))" +"{20020101 TO 20030101}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"{20020101 TO 20030101}^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"{20020101 TO 20030101}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"{20020101 TO 20030101}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:{20020101 TO 20030101}" -> "(MODIFIER (TMODIFIER (FIELD title (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:{20020101 TO 20030101}^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD title (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:{20020101 TO 20030101}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD title (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:{20020101 TO 20030101}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD title (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:{Aida TO Carmen}" -> "(MODIFIER (TMODIFIER (FIELD title (QRANGEEX (QNORMAL Aida) (QNORMAL Carmen)))))" +"{20020101 TO *}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QNORMAL 20020101) (QANYTHING *)))))" +"{* TO 20030101}^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD (QRANGEEX (QANYTHING *) (QNORMAL 20030101)))))" +"{20020101 *}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEEX (QNORMAL 20020101) (QANYTHING *)))))" +"{* 20030101}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEEX (QANYTHING *) (QNORMAL 20030101)))))" +"[this TO that]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QNORMAL this) (QNORMAL that)))))" +"[this that]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QNORMAL this) (QNORMAL that)))))" +"[this TO *]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QNORMAL this) (QANYTHING *)))))" +"[this]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QNORMAL this) (QANYTHING *)))))" +"[* this]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QANYTHING *) (QNORMAL this)))))" +"[* TO this]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QANYTHING *) (QNORMAL this)))))" 
+"[\"this\" TO \"that*\"]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QPHRASE \"this\") (QPHRASETRUNC \"that*\")))))" +"[\"#$%^&\" TO \"&*()\"]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QPHRASE \"#$%^&\") (QPHRASETRUNC \"&*()\")))))" +"{this TO that}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QNORMAL this) (QNORMAL that)))))" +"{this that}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QNORMAL this) (QNORMAL that)))))" +"{this TO *}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QNORMAL this) (QANYTHING *)))))" +"{* this}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QANYTHING *) (QNORMAL this)))))" +"{* TO this}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QANYTHING *) (QNORMAL this)))))" +"{\"this\" TO \"that*\"}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QPHRASE \"this\") (QPHRASETRUNC \"that*\")))))" +"{\"#$%^&\" TO \"&*()\"}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QPHRASE \"#$%^&\") (QPHRASETRUNC \"&*()\")))))" +"{}" FAIL +"[]" FAIL +"\(1\+1\)\:2" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL \\(1\\+1\\)\\:2))))" +"escape:(\+\-\&\&\|\|\!\(\)\{\}\[\]\^\"\~\*\?\:\\)" FAIL +"\"jakarta apache\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\"))))" +"title:(+return +\"pink panther\")" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD title (DEFOP (MODIFIER + (TMODIFIER (FIELD (QNORMAL return)))) (MODIFIER + (TMODIFIER (FIELD (QPHRASE \"pink panther\")))))))))" +"field:(one two three)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD field (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL one)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL two)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL three)))))))))" +"field:(one +two -three)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD field (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL one)))) (MODIFIER + (TMODIFIER (FIELD (QNORMAL two)))) (MODIFIER - (TMODIFIER (FIELD (QNORMAL three)))))))))" +"+field:(-one +two three)" -> "(CLAUSE (MODIFIER + (TMODIFIER (FIELD field (DEFOP (MODIFIER - (TMODIFIER (FIELD (QNORMAL one)))) (MODIFIER + (TMODIFIER (FIELD (QNORMAL two)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL three)))))))))" +"-field:(-one +two three)" -> "(CLAUSE (MODIFIER - (TMODIFIER (FIELD field (DEFOP (MODIFIER - (TMODIFIER (FIELD (QNORMAL one)))) (MODIFIER + (TMODIFIER (FIELD (QNORMAL two)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL three)))))))))" +"field:(one)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD field (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL one)))))))))" +"field: (one)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD field (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL one)))))))))" +"field:( one )" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD field (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL one)))))))))" +"+value" -> "(MODIFIER + (TMODIFIER (FIELD (QNORMAL value))))" +"-value" -> "(MODIFIER - (TMODIFIER (FIELD (QNORMAL value))))" +"+field:" FAIL +"+field:[this TO that]" -> "(MODIFIER + (TMODIFIER (FIELD field (QRANGEIN (QNORMAL this) (QNORMAL that)))))" +"+field:[ this TO that ]" -> "(MODIFIER + (TMODIFIER (FIELD field (QRANGEIN (QNORMAL this) (QNORMAL that)))))" +"+field:{this TO that}" -> "(MODIFIER + (TMODIFIER (FIELD field (QRANGEEX (QNORMAL this) (QNORMAL that)))))" +"+field: {this TO that}" -> "(MODIFIER + (TMODIFIER (FIELD field (QRANGEEX (QNORMAL this) (QNORMAL that)))))" +"m:(a b c)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))))))))" +"+m:(a b c)" -> "(CLAUSE (MODIFIER + (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER 
(TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))))))))" +"m:(+a b c)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER + (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))))))))" +"m:(-a +b c)^0.6" -> "(CLAUSE (MODIFIER (TMODIFIER (BOOST ^0.6) FUZZY (FIELD m (DEFOP (MODIFIER - (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER + (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))))))))" +"m:(a b c or d)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (OR (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL d))))))))))" +"m:(a b c OR d)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (OR (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL d))))))))))" +"m:(a b c OR d NOT e)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (OR (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))) (NOT (MODIFIER (TMODIFIER (FIELD (QNORMAL d)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL e)))))))))))" +"m:(a b NEAR c)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL NEAR)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))))))))" +"m:(a b NEAR c d AND e)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL NEAR)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))) (AND (MODIFIER (TMODIFIER (FIELD (QNORMAL d)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL e))))))))))" +"-m:(a b NEAR c d AND e)" -> "(CLAUSE (MODIFIER - (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL NEAR)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))) (AND (MODIFIER (TMODIFIER (FIELD (QNORMAL d)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL e))))))))))" +"*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *))))" +"*:*" -> "(MODIFIER (TMODIFIER (FIELD (QANYTHING *))))" +"this" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL this))))" +"th\*is" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL th\\*is))))" +"this999" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL this999))))" +"this0.9" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL this0.9))))" +"\"this\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"this\"))))" +"\"this \"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"this \"))))" +"\"this \" " -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"this \"))))" +"\" this \"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \" this \"))))" +"\"a \\\"b c\\\" d\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"a \\\"b c\\\" d\"))))" +"a\\\\\\+b" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL a\\\\\\+b))))" +"\"a \\\"b c\\\" d\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"a \\\"b c\\\" d\"))))" +"\"a \\\"b c\\\" d\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"a \\\"b c\\\" d\"))))" +"\"a \\+b c d\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"a \\+b c d\"))))" +"a\\u0062c" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL a\\u0062c))))" +"\"+() AND that\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"+() AND that\"))))" +"\"func(*) AND that\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"func(*) AND 
that\"))))" +"\"+() AND that\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"+() AND that\"))))" +"\"func(*) AND that\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"func(*) AND that\"))))" +"*t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t))))" +"*t*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*))))" +"*t?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t?))))" +"?t?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED ?t?))))" +"?t*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED ?t*))))" +"?t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED ?t))))" +"*t*a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*a))))" +"*t?a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t?a))))" +"*t*a*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*a*))))" +"t*a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED t*a))))" +"t*a?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED t*a?))))" +"t*a?a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED t*a?a))))" +"?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED ?))))" +"*t\\*a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t\\*a))))" +"\\*t" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL \\*t))))" +"*t*a\\*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*a\\*))))" +"*t*a\\?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*a\\?))))" +"*t*\\a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*\\a))))" +"foo:*" -> "(MODIFIER (TMODIFIER (FIELD foo (QTRUNCATED *))))" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Index: lucene/queryparser/grammars/ExtendedLuceneGrammar.gunit =================================================================== --- lucene/queryparser/grammars/ExtendedLuceneGrammar.gunit (revision 0) +++ lucene/queryparser/grammars/ExtendedLuceneGrammar.gunit (revision 0) @@ -0,0 +1,506 @@ +gunit ExtendedLuceneGrammar; +@header { +package org.apache.lucene.queryparser.flexible.aqp.parser; +} + + +mainQ: +"title:\"X x\" AND text:go title:\"x y\" AND A" -> "(DEFOP (AND (MODIFIER (TMODIFIER (FIELD title (QPHRASE \"X x\")))) (MODIFIER (TMODIFIER (FIELD text (QNORMAL go))))) (AND (MODIFIER (TMODIFIER (FIELD title (QPHRASE \"x y\")))) (MODIFIER (TMODIFIER (FIELD (QNORMAL A))))))" +"title:X Y Z" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD title (QNORMAL X)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL Y)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL Z)))))" +"jakarta^4 apache" -> "(DEFOP (MODIFIER (TMODIFIER (BOOST ^4) FUZZY (FIELD (QNORMAL jakarta)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL apache)))))" +"\"jakarta apache\"^4 \"Apache Lucene\"" -> "(DEFOP (MODIFIER (TMODIFIER (BOOST ^4) FUZZY (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QPHRASE \"Apache Lucene\")))))" +"\"jakarta apache\" OR jakarta" -> "(DEFOP (OR (MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QNORMAL jakarta))))))" +"\"jakarta apache\" AND \"Apache Lucene\"" -> "(DEFOP (AND (MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QPHRASE \"Apache Lucene\"))))))" +"+jakarta lucene" -> "(DEFOP (MODIFIER 
+ (TMODIFIER (FIELD (QNORMAL jakarta)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL lucene)))))" +"\"jakarta apache\" NOT \"Apache Lucene\"" -> "(DEFOP (NOT (MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\")))) (MODIFIER (TMODIFIER (FIELD (QPHRASE \"Apache Lucene\"))))))" +"(jakarta OR apache) AND website" -> "(DEFOP (AND (OR (MODIFIER (TMODIFIER (FIELD (QNORMAL jakarta)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL apache))))) (MODIFIER (TMODIFIER (FIELD (QNORMAL website))))))" +"this (that)" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))" +"this ((that))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))" +"(this) ((((((that))))))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))" +"(this) (that)" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))" +"this +(that)" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))" +"this ((((+(that)))))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))" +"this (+(((+(that)))))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))))))" +"this +((((+(that)))))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))))))" +"this +(+((((that)))))" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (CLAUSE (MODIFIER + (TMODIFIER (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))))))" +"this (+(that)^7)" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (BOOST ^7) FUZZY (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))))))))" +"this (+(((+(that))))" FAIL +"this (++(((+(that)))))" FAIL +"this +(that thus)^7" -> "(DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (CLAUSE (MODIFIER + (TMODIFIER (BOOST ^7) FUZZY (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL thus)))))))))" +"this (+(((+(that))))" FAIL +"this (++(((+(that)))))" FAIL +"this NEAR that" -> "(DEFOP (NEAR (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that))))))" +"this NEAR5 that" -> "(DEFOP (NEAR5 (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that))))))" +"this NEAR (that OR bar)" -> "(DEFOP (NEAR (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (OR (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL bar)))))))" +"(this foo) NEAR (that bar)" -> "(DEFOP (NEAR (MODIFIER (TMODIFIER (FIELD (QNORMAL this)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL foo)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL that)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL bar))))))" + + + + + + +atom: +"te?t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED te?t))))" +"test*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED test*))))" +"te*t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED te*t))))" +"te*t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED te*t))))" +"*te*t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *te*t))))" +"*te*t*" -> "(MODIFIER (TMODIFIER (FIELD 
(QTRUNCATED *te*t*))))" +"?te*t?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED ?te*t?))))" +"te?t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED te?t))))" +"te??t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED te??t))))" +"te*?t" OK +"\"text\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"text\"))))" +"\"te*t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te*t\"))))" +"\"test*\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"test*\"))))" +"\"te*t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te*t\"))))" +"\"te*t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te*t\"))))" +"\"*te*t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"*te*t\"))))" +"\"*te*t*\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"*te*t*\"))))" +"\"?te*t?\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"?te*t?\"))))" +"\"te?t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te?t\"))))" +"\"te??t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te??t\"))))" +"\"te*?t\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"te*?t\"))))" +"roam~" -> "(MODIFIER (TMODIFIER BOOST (FUZZY ~) (FIELD (QNORMAL roam))))" +"roam~0.8" -> "(MODIFIER (TMODIFIER BOOST (FUZZY ~0.8) (FIELD (QNORMAL roam))))" +"roam~0.899999999" -> "(MODIFIER (TMODIFIER BOOST (FUZZY ~0.899999999) (FIELD (QNORMAL roam))))" +"roam~8" -> "(MODIFIER (TMODIFIER BOOST (FUZZY ~8) (FIELD (QNORMAL roam))))" +"roam^" -> "(MODIFIER (TMODIFIER (BOOST ^) FUZZY (FIELD (QNORMAL roam))))" +"roam^0.8" -> "(MODIFIER (TMODIFIER (BOOST ^0.8) FUZZY (FIELD (QNORMAL roam))))" +"roam^0.899999999" -> "(MODIFIER (TMODIFIER (BOOST ^0.899999999) FUZZY (FIELD (QNORMAL roam))))" +"roam^8" -> "(MODIFIER (TMODIFIER (BOOST ^8) FUZZY (FIELD (QNORMAL roam))))" +"roam^~" -> "(MODIFIER (TMODIFIER (BOOST ^) (FUZZY ~) (FIELD (QNORMAL roam))))" +"roam^0.8~" -> "(MODIFIER (TMODIFIER (BOOST ^0.8) (FUZZY ~) (FIELD (QNORMAL roam))))" +"roam^0.899999999~0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.899999999) (FUZZY ~0.5) (FIELD (QNORMAL roam))))" +"roam~^" -> "(MODIFIER (TMODIFIER (BOOST ^) (FUZZY ~) (FIELD (QNORMAL roam))))" +"roam~0.8^" -> "(MODIFIER (TMODIFIER (BOOST ^) (FUZZY ~0.8) (FIELD (QNORMAL roam))))" +"roam~0.899999999^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~0.899999999) (FIELD (QNORMAL roam))))" +"this^ 5" -> "(MODIFIER (TMODIFIER (BOOST ^) FUZZY (FIELD (QNORMAL this))))" +"this^5~ 9" -> "(MODIFIER (TMODIFIER (BOOST ^5) (FUZZY ~) (FIELD (QNORMAL this))))" +"9999" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL 9999))))" +"9999.1" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL 9999.1))))" +"0.9999" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL 0.9999))))" +"00000000.9999" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL 00000000.9999))))" +"\"jakarta apache\"~10" -> "(MODIFIER (TMODIFIER BOOST (FUZZY ~10) (FIELD (QPHRASE \"jakarta apache\"))))" +"\"jakarta apache\"^10" -> "(MODIFIER (TMODIFIER (BOOST ^10) FUZZY (FIELD (QPHRASE \"jakarta apache\"))))" +"\"jakarta apache\"~10^" -> "(MODIFIER (TMODIFIER (BOOST ^) (FUZZY ~10) (FIELD (QPHRASE \"jakarta apache\"))))" +"\"jakarta apache\"^10~" -> "(MODIFIER (TMODIFIER (BOOST ^10) (FUZZY ~) (FIELD (QPHRASE \"jakarta apache\"))))" +"\"jakarta apache\"~10^0.6" -> "(MODIFIER (TMODIFIER (BOOST ^0.6) (FUZZY ~10) (FIELD (QPHRASE \"jakarta apache\"))))" +"\"jakarta apache\"^10~0.6" -> "(MODIFIER (TMODIFIER (BOOST ^10) (FUZZY ~0.6) (FIELD (QPHRASE \"jakarta apache\"))))" +"[20020101 TO 20030101]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"[20020101 TO 20030101]^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD 
(QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"[20020101 TO 20030101]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"[20020101 TO 20030101]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:[20020101 TO 20030101]" -> "(MODIFIER (TMODIFIER (FIELD title (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:[20020101 TO 20030101]^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD title (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:[20020101 TO 20030101]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD title (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:[20020101 TO 20030101]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD title (QRANGEIN (QNORMAL 20020101) (QNORMAL 20030101)))))" +"[* TO 20030101]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QANYTHING *) (QNORMAL 20030101)))))" +"[20020101 TO *]^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD (QRANGEIN (QNORMAL 20020101) (QANYTHING *)))))" +"[* 20030101]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEIN (QANYTHING *) (QNORMAL 20030101)))))" +"[20020101 *]^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEIN (QNORMAL 20020101) (QANYTHING *)))))" +"{20020101 TO 20030101}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"{20020101 TO 20030101}^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"{20020101 TO 20030101}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"{20020101 TO 20030101}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:{20020101 TO 20030101}" -> "(MODIFIER (TMODIFIER (FIELD title (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:{20020101 TO 20030101}^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD title (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:{20020101 TO 20030101}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD title (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:{20020101 TO 20030101}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD title (QRANGEEX (QNORMAL 20020101) (QNORMAL 20030101)))))" +"title:{Aida TO Carmen}" -> "(MODIFIER (TMODIFIER (FIELD title (QRANGEEX (QNORMAL Aida) (QNORMAL Carmen)))))" +"{20020101 TO *}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QNORMAL 20020101) (QANYTHING *)))))" +"{* TO 20030101}^0.5" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) FUZZY (FIELD (QRANGEEX (QANYTHING *) (QNORMAL 20030101)))))" +"{20020101 *}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEEX (QNORMAL 20020101) (QANYTHING *)))))" +"{* 20030101}^0.5~" -> "(MODIFIER (TMODIFIER (BOOST ^0.5) (FUZZY ~) (FIELD (QRANGEEX (QANYTHING *) (QNORMAL 20030101)))))" +"[this TO that]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QNORMAL this) (QNORMAL that)))))" +"[this that]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QNORMAL this) (QNORMAL that)))))" +"[this TO *]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QNORMAL this) (QANYTHING *)))))" +"[this]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QNORMAL this) (QANYTHING *)))))" +"[* this]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QANYTHING *) (QNORMAL this)))))" +"[* TO this]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN 
(QANYTHING *) (QNORMAL this)))))" +"[\"this\" TO \"that*\"]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QPHRASE \"this\") (QPHRASETRUNC \"that*\")))))" +"[\"#$%^&\" TO \"&*()\"]" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEIN (QPHRASE \"#$%^&\") (QPHRASETRUNC \"&*()\")))))" +"{this TO that}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QNORMAL this) (QNORMAL that)))))" +"{this that}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QNORMAL this) (QNORMAL that)))))" +"{this TO *}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QNORMAL this) (QANYTHING *)))))" +"{* this}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QANYTHING *) (QNORMAL this)))))" +"{* TO this}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QANYTHING *) (QNORMAL this)))))" +"{\"this\" TO \"that*\"}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QPHRASE \"this\") (QPHRASETRUNC \"that*\")))))" +"{\"#$%^&\" TO \"&*()\"}" -> "(MODIFIER (TMODIFIER (FIELD (QRANGEEX (QPHRASE \"#$%^&\") (QPHRASETRUNC \"&*()\")))))" +"{}" FAIL +"[]" FAIL +"\(1\+1\)\:2" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL \\(1\\+1\\)\\:2))))" +"escape:(\+\-\&\&\|\|\!\(\)\{\}\[\]\^\"\~\*\?\:\\)" FAIL +"\"jakarta apache\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"jakarta apache\"))))" +"title:(+return +\"pink panther\")" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD title (DEFOP (MODIFIER + (TMODIFIER (FIELD (QNORMAL return)))) (MODIFIER + (TMODIFIER (FIELD (QPHRASE \"pink panther\")))))))))" +"field:(one two three)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD field (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL one)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL two)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL three)))))))))" +"field:(one +two -three)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD field (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL one)))) (MODIFIER + (TMODIFIER (FIELD (QNORMAL two)))) (MODIFIER - (TMODIFIER (FIELD (QNORMAL three)))))))))" +"+field:(-one +two three)" -> "(CLAUSE (MODIFIER + (TMODIFIER (FIELD field (DEFOP (MODIFIER - (TMODIFIER (FIELD (QNORMAL one)))) (MODIFIER + (TMODIFIER (FIELD (QNORMAL two)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL three)))))))))" +"-field:(-one +two three)" -> "(CLAUSE (MODIFIER - (TMODIFIER (FIELD field (DEFOP (MODIFIER - (TMODIFIER (FIELD (QNORMAL one)))) (MODIFIER + (TMODIFIER (FIELD (QNORMAL two)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL three)))))))))" +"field:(one)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD field (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL one)))))))))" +"field: (one)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD field (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL one)))))))))" +"field:( one )" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD field (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL one)))))))))" +"+value" -> "(MODIFIER + (TMODIFIER (FIELD (QNORMAL value))))" +"-value" -> "(MODIFIER - (TMODIFIER (FIELD (QNORMAL value))))" +"+field:" FAIL +"+field:[this TO that]" -> "(MODIFIER + (TMODIFIER (FIELD field (QRANGEIN (QNORMAL this) (QNORMAL that)))))" +"+field:[ this TO that ]" -> "(MODIFIER + (TMODIFIER (FIELD field (QRANGEIN (QNORMAL this) (QNORMAL that)))))" +"+field:{this TO that}" -> "(MODIFIER + (TMODIFIER (FIELD field (QRANGEEX (QNORMAL this) (QNORMAL that)))))" +"+field: {this TO that}" -> "(MODIFIER + (TMODIFIER (FIELD field (QRANGEEX (QNORMAL this) (QNORMAL that)))))" +"m:(a b c)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))))))))" +"+m:(a b c)" -> "(CLAUSE (MODIFIER + (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER 
(FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))))))))" +"m:(+a b c)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER + (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))))))))" +"m:(-a +b c)^0.6" -> "(CLAUSE (MODIFIER (TMODIFIER (BOOST ^0.6) FUZZY (FIELD m (DEFOP (MODIFIER - (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER + (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))))))))" +"m:(a b c or d)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (OR (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL d))))))))))" +"m:(a b c OR d)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (OR (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL d))))))))))" +"m:(a b c OR d NOT e)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (OR (MODIFIER (TMODIFIER (FIELD (QNORMAL c)))) (NOT (MODIFIER (TMODIFIER (FIELD (QNORMAL d)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL e)))))))))))" +"m:(a b NEAR c)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (NEAR (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c))))))))))" +"m:(a b NEAR c d AND e)" -> "(CLAUSE (MODIFIER (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (NEAR (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c))))) (AND (MODIFIER (TMODIFIER (FIELD (QNORMAL d)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL e))))))))))" +"-m:(a b NEAR c d AND e)" -> "(CLAUSE (MODIFIER - (TMODIFIER (FIELD m (DEFOP (MODIFIER (TMODIFIER (FIELD (QNORMAL a)))) (NEAR (MODIFIER (TMODIFIER (FIELD (QNORMAL b)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL c))))) (AND (MODIFIER (TMODIFIER (FIELD (QNORMAL d)))) (MODIFIER (TMODIFIER (FIELD (QNORMAL e))))))))))" +"*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *))))" +"*:*" -> "(MODIFIER (TMODIFIER (FIELD (QANYTHING *))))" +"this" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL this))))" +"th\*is" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL th\\*is))))" +"this999" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL this999))))" +"this0.9" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL this0.9))))" +"\"this\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"this\"))))" +"\"this \"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"this \"))))" +"\"this \" " -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"this \"))))" +"\" this \"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \" this \"))))" +"\"a \\\"b c\\\" d\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"a \\\"b c\\\" d\"))))" +"a\\\\\\+b" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL a\\\\\\+b))))" +"\"a \\\"b c\\\" d\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"a \\\"b c\\\" d\"))))" +"\"a \\\"b c\\\" d\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"a \\\"b c\\\" d\"))))" +"\"a \\+b c d\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"a \\+b c d\"))))" +"a\\u0062c" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL a\\u0062c))))" +"\"+() AND that\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"+() AND that\"))))" +"\"func(*) AND that\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"func(*) AND that\"))))" +"\"+() AND that\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASE \"+() AND 
that\"))))" +"\"func(*) AND that\"" -> "(MODIFIER (TMODIFIER (FIELD (QPHRASETRUNC \"func(*) AND that\"))))" +"*t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t))))" +"*t*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*))))" +"*t?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t?))))" +"?t?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED ?t?))))" +"?t*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED ?t*))))" +"?t" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED ?t))))" +"*t*a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*a))))" +"*t?a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t?a))))" +"*t*a*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*a*))))" +"t*a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED t*a))))" +"t*a?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED t*a?))))" +"t*a?a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED t*a?a))))" +"?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED ?))))" +"*t\\*a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t\\*a))))" +"\\*t" -> "(MODIFIER (TMODIFIER (FIELD (QNORMAL \\*t))))" +"*t*a\\*" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*a\\*))))" +"*t*a\\?" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*a\\?))))" +"*t*\\a" -> "(MODIFIER (TMODIFIER (FIELD (QTRUNCATED *t*\\a))))" +"foo:*" -> "(MODIFIER (TMODIFIER (FIELD foo (QTRUNCATED *))))" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Index: lucene/queryparser/grammars/ExtendedLuceneGrammar.g =================================================================== --- lucene/queryparser/grammars/ExtendedLuceneGrammar.g (revision 0) +++ lucene/queryparser/grammars/ExtendedLuceneGrammar.g (revision 0) @@ -0,0 +1,486 @@ +grammar ExtendedLuceneGrammar; + +// +// This is a re-implementation of the standard lucene syntax with ANTLR3 +// http://lucene.apache.org/core/4_3_0/queryparser/index.html +// +// The query syntax is complete and supports the same features as the +// original parser written in JCC. The advantage of this grammar is that it +// is 'pluggable' into Lucene's modern flexible parser, so that you can +// add custom logic on top of the 'rigid' query syntax. Besides...the grammar +// is not that 'rigid' - you can modify the grammar and easily recompile. +// +// # run this commad inside antlrqueryparser +// +// $ ant generate-antlr -Dgrammar=MyNewGrammar +// +// # or if you want to test things, do: +// +// $ ant try-view -Dgrammar=MyNewGrammar -Dquery="foo AND bar" +// +// +// Implementation note: I have tried hard to avoid putting language specific details +// into the grammar, unfortunately this was not always possible. But it is kept +// at minimum. 
You can generate parser code in your language of choice +// if you change the following: +// +// options : +// language= +// superClass= the default is to subclass 'UnforgivingParser', this java class +// lives in the package oal.queryparser.flixible.aqp.parser +// and its purpose is to bark everytime when an exception +// happens (otherwise, ANTLR tries to recover from some situations +// -- you may want to remove this definition, or add your own +// error recovery logic there) +// +// @header: this adds the java declaration to the generated parser file, +// feel free to remove (if you want to test the grammar using +// ANTLRWorks, you want to remove it) +// @lexer::header: dtto but for lexer +// @lexer::members: again, necessary for being strict and prevent error +// recovery, but this applies only to lexer errors. +// +// One last note - if you want to implement your own error recovery, have a look +// at the generated java class +// +// oal.queryparser.flixible.aqp.parser.SyntaxParser.java +// +// There we are raising parse exception as well +// + + +options { + language = Java; + output = AST; + superClass = UnforgivingParser; +} + +tokens { + OPERATOR; + ATOM; + MODIFIER; + TMODIFIER; + CLAUSE; + FIELD; + FUZZY; + BOOST; + QNORMAL; + QPHRASE; + QPHRASETRUNC; + QTRUNCATED; + QRANGEIN; + QRANGEEX; + QANYTHING; + QDATE; +} + + +// java-specific and error recovery-unfriendly details.... + +@header{ + package org.apache.lucene.queryparser.flexible.aqp.parser; +} +@lexer::header { + package org.apache.lucene.queryparser.flexible.aqp.parser; +} + +// this is for exceptions on lexer level - we are preventing error +// recovery (ANTLRv3 does not seem to have a better way to modify +// the default behaviour - eg. from a parent abstract class) +@lexer::members { + public void recover(RecognitionException re) { + // throw unchecked exception + throw new RuntimeException(re); + } +} + +// ...below this point, language agnostic EBNF grammar lives. + + + + + +mainQ : + clauseOr+ EOF -> ^(OPERATOR["DEFOP"] clauseOr+) + ; + +clauseOr + : (first=clauseAnd -> $first) (or others=clauseAnd -> ^(OPERATOR["OR"] clauseAnd+ ))* + ; + +clauseAnd + : (first=clauseNot -> $first) (and others=clauseNot -> ^(OPERATOR["AND"] clauseNot+ ))* + ; + +clauseNot + : (first=clauseNear -> $first) (not others=clauseNear -> ^(OPERATOR["NOT"] clauseNear+ ))* + ; + +clauseNear + : (first=clauseBasic -> $first) (near others=clauseBasic -> ^(near clauseBasic+) )* + ; + +clauseBasic + : + (modifier LPAREN clauseOr+ RPAREN )=> modifier? LPAREN clauseOr+ RPAREN term_modifier? + -> ^(CLAUSE ^(MODIFIER modifier? ^(TMODIFIER term_modifier? ^(OPERATOR["DEFOP"] clauseOr+)))) // Default operator + | (LPAREN clauseOr+ RPAREN term_modifier)=> modifier? LPAREN clauseOr+ RPAREN term_modifier? + -> ^(CLAUSE ^(MODIFIER modifier? ^(TMODIFIER term_modifier? ^(OPERATOR["DEFOP"] clauseOr+)))) // Default operator + | (LPAREN )=> LPAREN clauseOr+ RPAREN + -> clauseOr+ + | atom + ; + + +atom + : + modifier? field multi_value term_modifier? + -> ^(CLAUSE ^(MODIFIER modifier? ^(TMODIFIER term_modifier? ^(FIELD field multi_value)))) + | modifier? field? value term_modifier? + -> ^(MODIFIER modifier? ^(TMODIFIER term_modifier? ^(FIELD field? 
value))) + ; + + +field + : + TERM_NORMAL COLON -> TERM_NORMAL + ; + +value + : + range_term_in -> ^(QRANGEIN range_term_in) + | range_term_ex -> ^(QRANGEEX range_term_ex) + | normal -> ^(QNORMAL normal) + | truncated -> ^(QTRUNCATED truncated) + | quoted -> ^(QPHRASE quoted) + | quoted_truncated -> ^(QPHRASETRUNC quoted_truncated) + | QMARK -> ^(QTRUNCATED QMARK) + | STAR COLON b=STAR -> ^(QANYTHING $b) + | STAR -> ^(QTRUNCATED STAR) + ; + + + +range_term_in + : + LBRACK + (a=range_value -> range_value ^(QANYTHING QANYTHING["*"])) + ( TO? b=range_value -> $a $b? )? + RBRACK + ; + + +range_term_ex + : + LCURLY + ( a=range_value -> range_value ^(QANYTHING QANYTHING["*"])) + ( TO? b=range_value -> $a $b? )? + RCURLY + ; + +range_value + : + truncated -> ^(QTRUNCATED truncated) + | quoted -> ^(QPHRASE quoted) + | quoted_truncated -> ^(QPHRASETRUNC quoted_truncated) + | date -> ^(QNORMAL date) + | normal -> ^(QNORMAL normal) + | STAR -> ^(QANYTHING STAR) + ; + +multi_value + : + LPAREN multiClause RPAREN -> multiClause + ; + + + +multiClause + : + + //m:(a b NEAR c OR d OR e) + + // without duplicating the rules (but it allows recursion) + clauseOr+ -> ^(OPERATOR["DEFOP"] clauseOr+) + + // allows only limited set of operations + //multiDefault + + // this is also working, but i want operator precedence + //multiClause: + //(mterm+ -> mterm+) + //(op=operator rhs=fclause -> ^(OPERATOR ^(OPERATOR["DEFOP"] $mclause) $rhs))? + //; + //flause:mclause; + ; + +multiDefault + : + multiOr+ -> ^(OPERATOR["DEFOP"] multiOr+) + ; + +multiOr + : + (first=multiAnd -> $first) (or others=multiAnd-> ^(OPERATOR["OR"] multiAnd+ ))* + ; + +multiAnd + : + (first=multiNot -> $first) (and others=multiNot -> ^(OPERATOR["AND"] multiNot+ ))* + ; + +multiNot + : + (first=multiNear -> $first) (not others=multiNear-> ^(OPERATOR["NOT"] multiNear+ ))* + ; + +multiNear + : + (first=multiBasic -> $first) (near others=multiBasic-> ^(near multiBasic+ ))* + ; + + +multiBasic + : + mterm + ; + +mterm + : + modifier? value -> ^(MODIFIER modifier? value) + ; + + +normal + : + TERM_NORMAL + | NUMBER + ; + + + + +truncated + : + TERM_TRUNCATED + ; + + +quoted_truncated + : + PHRASE_ANYTHING + ; + +quoted : + PHRASE + ; + + + + +operator: ( + AND -> OPERATOR["AND"] + | OR -> OPERATOR["OR"] + | NOT -> OPERATOR["NOT"] + | NEAR -> OPERATOR["NEAR"] + ); + +modifier: + PLUS -> PLUS["+"] + | MINUS -> MINUS["-"]; + + +/* +This terribly convoluted grammar is here because of weird AST rewrite rules +and because we need to allow for default value when TILDE is not followed by +anything + +This grammar has problem with following + : term^4~ 9999 + where 999 is another term, not a fuzzy value +*/ +term_modifier : + TILDE CARAT? -> ^(BOOST CARAT?) ^(FUZZY TILDE) + | CARAT TILDE? -> ^(BOOST CARAT) ^(FUZZY TILDE?) +/* + // first alternative + ( + (CARAT b=NUMBER -> ^(BOOST $b) ^(FUZZY ) + ) + ( //syntactic predicate + (TILDE NUMBER )=>TILDE f=NUMBER -> ^(BOOST $b) ^(FUZZY $f) + | TILDE -> ^(BOOST $b) ^(FUZZY NUMBER["0.5"]) + )* // set the default value + + ) + // second alternative [only ~ | ~NUMBER] + | + (TILDE -> ^(BOOST) ^(FUZZY NUMBER["0.5"])) // set the default value + ((~(WS|TILDE|CARAT))=>f=NUMBER -> ^(BOOST) ^(FUZZY $f?) )* //replace the default but '~' must not be followed by WS +*/ + ; + + +boost : + (CARAT -> ^(BOOST NUMBER["DEF"])) // set the default value + (NUMBER -> ^(BOOST NUMBER))? //replace the default with user input + ; + +fuzzy : + (TILDE -> ^(FUZZY NUMBER["DEF"])) // set the default value + (NUMBER -> ^(FUZZY NUMBER))? 
//replace the default with user input + ; + +not : + (AND NOT)=> AND NOT + | NOT + ; + +and : + AND + ; + +or : + OR + ; + +near : + (NEAR -> ^(OPERATOR[$NEAR]) ) + ; + +date : + //a=NUMBER '/' b=NUMBER '/' c=NUMBER -> ^(QDATE $a $b $c) + DATE_TOKEN + ; + +/* ================================================================ + * = LEXER = + * ================================================================ + */ + + + +LPAREN : '('; + +RPAREN : ')'; + +LBRACK : '['; + +RBRACK : ']'; + +COLON : ':' ; //this must NOT be fragment + +PLUS : '+' ; + +MINUS : ('-'|'\!'); + +STAR : '*' ; + +QMARK : '?'+ ; + +fragment VBAR : '|' ; + +fragment AMPER : '&' ; + +LCURLY : '{' ; + +RCURLY : '}' ; + +CARAT : '^' (INT+ ('.' INT+)?)?; + +TILDE : '~' (INT+ ('.' INT+)?)?; + +DQUOTE + : '\"'; + +SQUOTE + : '\''; + + + + +TO : 'TO'; + +/* We want to be case insensitive */ +AND : (('a' | 'A') ('n' | 'N') ('d' | 'D') | (AMPER AMPER?)) ; +OR : (('o' | 'O') ('r' | 'R') | (VBAR VBAR?)); +NOT : ('n' | 'N') ('o' | 'O') ('t' | 'T'); +NEAR : ('n' | 'N') ('e' | 'E') ('a' | 'A') ('r' | 'R') ('0'..'9')*; + + +WS : ( ' ' + | '\t' + | '\r' + | '\n' + | '\u3000' + ) + {$channel=HIDDEN;} + ; + + +/* +fragment TERM_CHAR : + ~(' ' | '\t' | '\n' | '\r' | '\u3000' + | '\\' | '\'' | '\"' + | '(' | ')' | '[' | ']' | '{' | '}' + | '+' | '-' | '!' | ':' | '~' | '^' + | '*' | '|' | '&' | '?' | '\\\"' | '/' //this line is not present in lucene StandardParser.jj + ); +*/ + + +fragment INT: '0' .. '9'; + + +fragment ESC_CHAR: '\\' .; + + +fragment TERM_START_CHAR + : + (~(' ' | '\t' | '\n' | '\r' | '\u3000' + | '\'' | '\"' + | '(' | ')' | '[' | ']' | '{' | '}' + | '+' | '-' | '!' | ':' | '~' | '^' + | '?' | '*' | '\\' + ) + | ESC_CHAR ); + + +fragment TERM_CHAR + : + (TERM_START_CHAR | '-' | '+') + ; + + +NUMBER + : + INT+ ('.' INT+)? + ; + +DATE_TOKEN + : + INT INT? ('/'|'-'|'.') INT INT? ('/'|'-'|'.') INT INT (INT INT)? + ; + +TERM_NORMAL + : + TERM_START_CHAR ( TERM_CHAR )* + ; + + +TERM_TRUNCATED: + (STAR|QMARK) (TERM_CHAR+ (QMARK|STAR))+ (TERM_CHAR)* + | TERM_START_CHAR (TERM_CHAR* (QMARK|STAR))+ (TERM_CHAR)* + | (STAR|QMARK) TERM_CHAR+ + ; + + +PHRASE + : + DQUOTE (ESC_CHAR|~('\"'|'\\'|'?'|'*'))+ DQUOTE + ; + +PHRASE_ANYTHING : + DQUOTE (ESC_CHAR|~('\"'|'\\'))+ DQUOTE + ; + Index: lucene/queryparser/grammars/build.properties =================================================================== --- lucene/queryparser/grammars/build.properties (revision 0) +++ lucene/queryparser/grammars/build.properties (revision 0) @@ -0,0 +1,16 @@ + +# Adjust these if you are building grammars using AQP +# they are used only for generating the nice html charts +# of parse trees for a grammar + +# command for the dot viewer +# on mac, this might be "open -a graphviz" +# or on linux /usr/bin/xdot + +dot_viewer=xdot + +svg_generator=dot + +java_executable=java + +python=python \ No newline at end of file Index: lucene/queryparser/grammars/generate_asts.py =================================================================== --- lucene/queryparser/grammars/generate_asts.py (revision 0) +++ lucene/queryparser/grammars/generate_asts.py (revision 0) @@ -0,0 +1,201 @@ + +import sys +import subprocess as sub +import os + +""" +Simple utility script to generate HTML charts of how ANTLR parses +every query and what is the resulting AST. 
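+
+Typical invocation (the grammar must have been built by ant first, see
+demo.sh; the grammar name argument is optional and defaults to
+StandardLuceneGrammar):
+
+    python generate_asts.py ExtendedLuceneGrammar
+"""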
+""" + +def run(grammar_name, basedir='', + cp='.:../lib/antlr-3.4-complete.jar', + grammardir='', + java_executable='java', + dot_executable='dot' + ): + + + if not basedir: + basedir = os.path.abspath('../../build/queryparser') + + old_dir = os.getcwd() + + thisdir = grammardir + if not thisdir: + thisdir = os.path.dirname(os.path.abspath(__file__)) + os.chdir(thisdir) + + cp += os.pathsep + basedir + + print "We'll generate ANTLR graphs\ngramar: %s\nbasedir: %s\nclasspath: %s\nparserdir: %s" % (grammar_name, basedir, cp, thisdir) + + grammar_file = os.path.join(thisdir, grammar_name + '.g') + + if not os.path.exists(grammar_file): + raise Exception('Grammar %s does not exist in classpath: %s' % (grammar_file, cp)) + + tmp_file = os.path.join(basedir, 'ast-tree.dot') + index_file = os.path.join(basedir, '%s.html' % grammar_name) + gunit_file = os.path.join(thisdir, grammar_name + '.gunit') + generate_ast_command = '%s -cp %s org.apache.lucene.queryparser.flexible.aqp.parser.BuildAST %s "%%s"' % (java_executable, cp, grammar_name) + + + generate_svg_command = '%s -Tsvg %s' % (dot_executable, tmp_file) + + test_cases = load_gunit_file(gunit_file) + + index_fo = open(index_file, 'w') + index_fo.write('

Test cases generated from grammar: %s

\n' % grammar_name) + + out_lines = [] + i = 0 + cmds = generate_ast_command.split() + cmds_svg = generate_svg_command.split() + + total = sum(map(lambda x: len(x), test_cases.values())) + + toc = [] + data = [] + + toc.append('') + + for section,values in test_cases.items(): + output = tree = svg = '' + + toc.append('The rule:
%s

' % (section, section)) + + # generate AST tree + for query in values: + i += 1 + cmds[-1] = query + + #tmp_dot = os.path.join(basedir, 'tmp-%s.dot' % i) + + tmp_dot = tmp_file + + if os.path.exists(tmp_dot): + os.remove(tmp_dot) + + toc.append('%s.
%s

' % (i, i, query)) + + print '// %s/%s :: %s' % (i, total, query) + + + #generate graph + p = sub.Popen(cmds,stdout=sub.PIPE,stderr=sub.PIPE) + + output, errors = p.communicate() + if output: + fo = open(tmp_dot, 'w') + fo.write(output) + fo.close() + else: + print 'Error generating AST for: ' + query + print errors + if 'java.lang.ClassNotFoundException' in errors: + raise Exception('Please fix your classpath') + continue + + #generate tree + cmds.append(section) + cmds.append("tree") + p = sub.Popen(cmds,stdout=sub.PIPE,stderr=sub.PIPE) + + tree, errors = p.communicate() + if tree: + q = query.replace('\\', '\\\\').replace('"', '\\"').replace('\'', '\\\'') + t = tree.strip().replace('\\', '\\\\').replace('"', '\\"').replace("'", "\\'") + print "\"%s\" -> \"%s\"" % (q, t) + else: + print 'Error generating AST for: ' + query + print errors + tree = errors + + cmds.pop() + cmds.pop() + + cmds_svg[-1] = tmp_dot + + try: + p = sub.Popen(cmds_svg,stdout=sub.PIPE,stderr=sub.PIPE) + except Exception, e: + print "The following command failed:" + print ' '.join(cmds_svg) + raise e + + output, errors = p.communicate() + + data.append('

%s. %s   ^

' % (i, i, query)) + data.append(output) + data.append('
' + tree + '
') + data.append('
') + + index_fo.write(''' + + + + + + + ''') + index_fo.write('\n'.join(toc)) + index_fo.write('\n'.join(data)) + + index_fo.write(''' + + + ''') + index_fo.close() + + print 'HTML charts generated into:', index_fo.name + os.chdir(old_dir) + + +def load_gunit_file(gunit_file): + fi = open(gunit_file, 'r') + test_cases = {} + section = None + for line in fi: + l = line.strip() + if not l or l[:2] == '//': + continue + parts = split_line(l) + if len(parts) == 1 and parts[0][-1] == ':': + section = parts[0][:-1] + test_cases.setdefault(section, []) + elif len(parts) > 1 and parts[1].lower() != 'fails': + query = parts[0] + query = query.replace('\\\"', '"').replace('\\\'', '\'').replace('\\\\', '\\') + test_cases[section].append(query) + fi.close() + return test_cases + + +def split_line(line): + line = line.replace('->', '') + start = 0 + last_pos = None + parts = [] + while line.find('"', start) > -1: + p = line.index('"', start) + start = p+1 + if line[p-1] != '\\': + if last_pos is None: + last_pos = p + else: + parts.append(line[last_pos+1:p]) + parts.append(line[p+1:].strip()) + last_pos = None + break + if not parts: + parts.append(line.strip()) + return parts + + +if __name__ == '__main__': + if len(sys.argv) == 1: + sys.argv.insert(1, "StandardLuceneGrammar") + run(*sys.argv[1:]) Index: lucene/queryparser/grammars/demo.sh =================================================================== --- lucene/queryparser/grammars/demo.sh (revision 0) +++ lucene/queryparser/grammars/demo.sh (revision 0) @@ -0,0 +1,67 @@ +#!/bin/bash +set +e + +# this is just a helper utility to test complex queries (when some escape chars are involved +# and which are screwed by ant). The grammar must be built by ant first! + +QUERY=$1 + +GRAMMAR=${2:-StandardLuceneGrammar} + +RULE=${3:-mainQ} + +BINDIR=../../build/queryparser + +CP=.:/../lib/antlr-3.4-complete.jar:../../build/queryparser/lib/antlr-3.4-complete.jar:$BINDIR:$BINDIR/classes/test:$BINDIR/classes/java + +echo $BINDIR +echo $CP + +TGTDIR=$BINDIR/org/apache/lucene/queryparser/flexible/aqp/parser + +TGTFILE=$BINDIR/ast-tree.dot + + + +echo 'Input:' $QUERY + + +rm -fR $TGTDIR +mkdir -p $TGTDIR +rm $TGTFILE + +#echo "Regenerating grammar $GRAMMAR..." + +#java -cp $CP org.antlr.Tool -o $TGTDIR $GRAMMAR.g +#java -cp $CP org.antlr.Tool $GRAMMAR.g +#cp BuildAST.java $TGTDIR +#javac -cp $CP $TGTDIR/*.java + + + +#if [ "$1" = "build-only" ]; +#then +# echo "finished..." +# exit 0 +#fi + +java -cp $CP org.apache.lucene.queryparser.flexible.aqp.parser.BuildAST $GRAMMAR "$QUERY" $RULE tree +java -cp $CP org.apache.lucene.queryparser.flexible.aqp.parser.BuildAST $GRAMMAR "$QUERY" $RULE > $TGTFILE + +if ! grep -q "digraph" $TGTFILE ; +then + echo "No DOT file generated!" 
+ exit 1 +fi + + + +XDOT=`which xdot` +if [ $XDOT ]; +then + echo "executing: $XDOT $TGTFILE" + $XDOT $TGTFILE +else + echo "executing: open -a graphviz $TGTFILE" + open -a graphviz $TGTFILE +fi Index: lucene/queryparser/grammars/gunit.sh =================================================================== --- lucene/queryparser/grammars/gunit.sh (revision 0) +++ lucene/queryparser/grammars/gunit.sh (revision 0) @@ -0,0 +1,13 @@ +#!/bin/bash + +GRAMMAR=${1:-StandardLuceneGrammar} + +BINDIR=../../build/queryparser + +CP=.:/../lib/antlr-3.4-complete.jar:../../build/queryparser/lib/antlr-3.4-complete.jar:$BINDIR:$BINDIR/classes/test:$BINDIR/classes/java + +./demo.sh build-only $GRAMMAR + + +java -cp $CP org.antlr.gunit.Interp $GRAMMAR.gunit + Index: lucene/queryparser/aqp-build.xml =================================================================== --- lucene/queryparser/aqp-build.xml (revision 0) +++ lucene/queryparser/aqp-build.xml (revision 0) @@ -0,0 +1,224 @@ + + + + + + + + ANTLR Flexible Query Parser + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${dotprop.dot_viewer} ${aqp.dotfile} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Generating graphs for the grammar ${aqp.grammar} + Note: the ${dotprop.svg_generator} must exist and be executable + You can fix the path in ${common.dir}/queryparser/grammars/build.properties + + + + + + + + + + + + Index: lucene/queryparser/build.xml =================================================================== --- lucene/queryparser/build.xml (revision 1484512) +++ lucene/queryparser/build.xml (working copy) @@ -25,7 +25,19 @@ + + + + + + + + + + + + Index: solr/core/ivy.xml =================================================================== --- solr/core/ivy.xml (revision 1484512) +++ solr/core/ivy.xml (working copy) @@ -32,6 +32,7 @@ + Index: solr/core/src/test/org/apache/solr/search/TestAqpLuceneQParserPlugin.java =================================================================== --- solr/core/src/test/org/apache/solr/search/TestAqpLuceneQParserPlugin.java (revision 0) +++ solr/core/src/test/org/apache/solr/search/TestAqpLuceneQParserPlugin.java (revision 0) @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.search; + +import java.io.IOException; + +import org.apache.lucene.search.Query; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.CommonParams; +import org.apache.solr.request.SolrQueryRequest; +import org.junit.BeforeClass; +import org.junit.Test; + +public class TestAqpLuceneQParserPlugin extends SolrTestCaseJ4 { + + @BeforeClass + public static void beforeClass() throws Exception { + initCore("solrconfig-extended-lucene-qparser.xml","schema.xml"); + } + + public void createIndex() { + assertU(adoc("id","1", "text", "who")); + assertU(adoc("id","2", "text", "is stopword")); + assertU(adoc("id","3", "text", "able")); + assertU(adoc("id","4", "text", "to stopword")); + assertU(adoc("id","5", "text", "exchange")); + assertU(adoc("id","16", "text", "liberty")); + assertU(adoc("id","17", "text", "for stopword")); + assertU(adoc("id","18", "text", "safety")); + assertU(adoc("id","19", "text", "deserves")); + assertU(adoc("id","20", "text", "neither")); + assertU(adoc("id","21", "text", "Benjamin Franklin must not be a hero to NSA officers!")); + assertU(commit()); + } + + @Override + public void setUp() throws Exception { + super.setUp(); + createIndex(); + } + + + @Test + public void test() throws IOException, Exception { + + AqpLuceneQParserPlugin a = new AqpLuceneQParserPlugin(); + SolrQueryRequest r = req(CommonParams.Q, "franklin NEAR hero", CommonParams.DF, "text"); + QParser parser = a.createParser("franklin NEAR hero", r.getParams(), r.getParams(), r); + Query query = parser.parse(); + assertEquals("spanNear([text:franklin, text:hero], 5, true)", query.toString()); + r.close(); + + QParserPlugin qp = h.getCore().getQueryPlugin("lucene2"); + assertTrue("parserPlugin is not an instanceof " + AqpLuceneQParserPlugin.class, qp instanceof AqpLuceneQParserPlugin); + + + assertQ(req("q", "{!lucene2} franklin NEAR hero"), + "//*[@numFound='1']", + "//doc/int[@name='id'][.='21']" + ); + + assertQEx("Configuration for proximity search not heeded", + req("q", "{!lucene2} franklin NEAR10 officers"), + SolrException.ErrorCode.BAD_REQUEST); + + + assertQ(req("q", "{!lucene2} benjamin NEAR2 nsa"), + "//*[@numFound='0']" + ); + + assertQ(req("q", "{!lucene2} benjamin NEAR1 franklin"), + "//*[@numFound='1']", + "//doc/int[@name='id'][.='21']" + ); + + } + +} + + Index: solr/core/src/test-files/solr/collection1/conf/solrconfig-extended-lucene-qparser.xml =================================================================== --- solr/core/src/test-files/solr/collection1/conf/solrconfig-extended-lucene-qparser.xml (revision 0) +++ solr/core/src/test-files/solr/collection1/conf/solrconfig-extended-lucene-qparser.xml (revision 0) @@ -0,0 +1,32 @@ + + + + + + + ${tests.luceneMatchVersion:LUCENE_CURRENT} + + + + text + + + + + + Index: solr/core/src/java/org/apache/solr/search/AqpLuceneQParser.java =================================================================== --- solr/core/src/java/org/apache/solr/search/AqpLuceneQParser.java (revision 0) +++ solr/core/src/java/org/apache/solr/search/AqpLuceneQParser.java (revision 0) @@ -0,0 +1,128 @@ +package org.apache.solr.search; + +import org.apache.lucene.queryparser.classic.ParseException; +import org.apache.lucene.queryparser.flexible.core.QueryNodeException; +import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException; +import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler; +import 
org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys; +import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.Operator; +import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardLuceneParser; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryConfigHandler; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryNodeProcessorPipeline; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpStandardQueryTreeBuilder; +import org.apache.lucene.search.Query; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.CommonParams; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.schema.IndexSchema; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This is the MAIN solr entry point - this instantiates a query + * parser - it sets some default parameters from the config and prepares + * ulr parameters. + * + *
+ * TODO: the parser needs to know configuration of data/numeric fields
+ * so that the analysis is done correctly. It is not done here
+ * for the moment (as this is SOLR-specific and needs more
+ * components)
+ *
+ * In principle, it is configured like this:
+ *
+ *      HashMap ncm = new HashMap();
+ *      config.set(StandardQueryConfigHandler.ConfigurationKeys.NUMERIC_CONFIG_MAP, ncm);
+ *      ncm.put(field_name, new NumericConfig(4, NumberFormat.getNumberInstance(Locale.US), NumericType.INT));
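+ *
+ * where config above is the handler returned by the parser itself and
+ * field_name stands for whichever schema field needs numeric handling,
+ * i.e. (illustrative sketch only):
+ *
+ *      QueryConfigHandler config = parser.getQueryConfigHandler();
+ *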
+ * + * @see AqpLuceneQParserPlugin + * @see AqpStandardLuceneParser + * @see AqpStandardQueryConfigHandler + * @see AqpStandardQueryNodeProcessorPipeline + * @see AqpStandardQueryTreeBuilder + * + */ +public class AqpLuceneQParser extends QParser { + + public static final Logger log = LoggerFactory + .getLogger(AqpLuceneQParser.class); + + private AqpQueryParser qParser; + + public AqpLuceneQParser(AqpQueryParser parser, String qstr, SolrParams localParams, + SolrParams params, SolrQueryRequest req) + throws QueryNodeParseException { + + super(qstr, localParams, params, req); + qParser = parser; + + if (getString() == null) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "The query is empty"); + } + + + IndexSchema schema = req.getSchema(); + + + // now configure the parser using the request params, likely incomplete (yet) + QueryConfigHandler config = qParser.getQueryConfigHandler(); + qParser.setAnalyzer(schema.getAnalyzer()); + qParser.setDefaultField(params.get(CommonParams.DF, qParser.getDefaultField())); + + String opParam = getParam(QueryParsing.OP); + if (opParam != null) { + // other operators could also become default, eg. NEAR + qParser.setDefaultOperator("AND".equals(opParam.toUpperCase()) ? Operator.AND + : Operator.OR); + } + else { + qParser.setDefaultOperator(Operator.OR); + } + + + // this is not useful in solr world (?) - but must be available + config.set(ConfigurationKeys.MULTI_FIELDS, new CharSequence[0]); + + if (params.getBool("debugQuery", false) != false) { + try { + qParser.setDebug(true); + } catch (Exception e) { + e.printStackTrace(); + } + } + + // your components may need access to solr, if there is some 'semantic analysis' + // involved, eg. multi-step parsing of a query. I do it this way (the component + // may be committed to solr in other tickets) + // AqpRequestParams reqAttr = config.get(AqpAdsabsQueryConfigHandler.ConfigurationKeys.SOLR_REQUEST); + // reqAttr.setQueryString(getString()); + // reqAttr.setRequest(req); + // reqAttr.setLocalParams(localParams); + // reqAttr.setParams(params); + + } + + + public Query parse() throws ParseException { + try { + return qParser.parse(getString(), null); + } catch (QueryNodeException e) { + throw new ParseException(e.getMessage()); + } + catch (SolrException e1) { + throw new ParseException(e1.getMessage()); + } + } + + public AqpQueryParser getParser() { + return qParser; + } +} Index: solr/core/src/java/org/apache/solr/search/AqpLuceneQParserPlugin.java =================================================================== --- solr/core/src/java/org/apache/solr/search/AqpLuceneQParserPlugin.java (revision 0) +++ solr/core/src/java/org/apache/solr/search/AqpLuceneQParserPlugin.java (revision 0) @@ -0,0 +1,47 @@ +package org.apache.solr.search; + +import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser; +import org.apache.lucene.queryparser.flexible.aqp.parser.AqpExtendedLuceneParser; +import org.apache.lucene.queryparser.flexible.core.QueryNodeParseException; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.request.SolrQueryRequest; + +/** + * An instance of ANTLR query parser. Implements the + * standard lucene grammar together with support for + * span queries. 
+ */ + +public class AqpLuceneQParserPlugin extends QParserPlugin { + public static String NAME = "lucene2"; + + private String defaultField = "all"; + + @SuppressWarnings("rawtypes") + public void init(NamedList args) { + NamedList defaults = (NamedList) args.get("defaults"); + if (defaults != null) { + if (defaults.get("defaultField") != null) { + defaultField = (String) defaults.get("defaultField"); + } + } + } + + + public QParser createParser(String qstr, SolrParams localParams, + SolrParams params, SolrQueryRequest req) { + try { + AqpQueryParser parser = AqpExtendedLuceneParser.init(); + + parser.setDefaultField(defaultField); + + return new AqpLuceneQParser(parser, qstr, localParams, params, req); + } catch (QueryNodeParseException e) { + throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, e.getLocalizedMessage()); + } catch (Exception e) { + throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, e.getLocalizedMessage()); + } + } +}
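A minimal standalone sketch of driving the new ANTLR-based parser outside of Solr, using only calls that already appear in this patch (AqpExtendedLuceneParser.init(), setAnalyzer, setDefaultField, parse). The WhitespaceAnalyzer and the field name "text" are arbitrary illustrations, not something the patch prescribes:

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.queryparser.flexible.aqp.AqpQueryParser;
import org.apache.lucene.queryparser.flexible.aqp.parser.AqpExtendedLuceneParser;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Version;

public class AqpParserSketch {
  public static void main(String[] args) throws Exception {
    // Same construction path as AqpLuceneQParserPlugin.createParser(),
    // minus the Solr request plumbing.
    AqpQueryParser parser = AqpExtendedLuceneParser.init();
    parser.setAnalyzer(new WhitespaceAnalyzer(Version.LUCENE_43)); // any analyzer; illustration only
    parser.setDefaultField("text");                                // illustrative field name
    // NEAR is rewritten into a span query, cf. TestAqpLuceneQParserPlugin.
    Query q = parser.parse("franklin NEAR hero", null);
    System.out.println(q);
  }
}

Run against the data indexed in TestAqpLuceneQParserPlugin this should print something like the span query asserted there, e.g. spanNear([text:franklin, text:hero], 5, true). Inside Solr the same parser is reachable as {!lucene2} once registered as in solrconfig-extended-lucene-qparser.xml.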