_regex_core.py 138 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
742784279428042814282428342844285428642874288428942904291429242934294429542964297429842994300430143024303430443054306430743084309431043114312431343144315431643174318431943204321432243234324432543264327432843294330433143324333433443354336433743384339434043414342434343444345434643474348434943504351435243534354435543564357435843594360436143624363436443654366436743684369437043714372437343744375437643774378437943804381438243834384438543864387438843894390439143924393439443954396439743984399440044014402440344044405440644074408440944104411441244134414441544164417441844194420442144224423442444254426442744284429443044314432443344344435443644374438443944404441444244434444444544464447444844494450445144524453445444554456445744584459446044614462446344644465446644674468446944704471447244734474447544764477447844794480448144824483448444854486448744884489449044914492449344944495
  1. #
  2. # Secret Labs' Regular Expression Engine core module
  3. #
  4. # Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
  5. #
  6. # This version of the SRE library can be redistributed under CNRI's
  7. # Python 1.6 license. For any other use, please contact Secret Labs
  8. # AB (info@pythonware.com).
  9. #
  10. # Portions of this engine have been developed in cooperation with
  11. # CNRI. Hewlett-Packard provided funding for 1.6 integration and
  12. # other compatibility work.
  13. #
  14. # 2010-01-16 mrab Python front-end re-written and extended
  15. import enum
  16. import string
  17. import unicodedata
  18. from collections import defaultdict
  19. import regex._regex as _regex
# The public names exported by "from regex import *".
__all__ = ["A", "ASCII", "B", "BESTMATCH", "D", "DEBUG", "E", "ENHANCEMATCH",
  "F", "FULLCASE", "I", "IGNORECASE", "L", "LOCALE", "M", "MULTILINE", "P",
  "POSIX", "R", "REVERSE", "S", "DOTALL", "T", "TEMPLATE", "U", "UNICODE",
  "V0", "VERSION0", "V1", "VERSION1", "W", "WORD", "X", "VERBOSE", "error",
  "Scanner", "RegexFlag"]
  25. # The regex exception.
  26. class error(Exception):
  27. """Exception raised for invalid regular expressions.
  28. Attributes:
  29. msg: The unformatted error message
  30. pattern: The regular expression pattern
  31. pos: The position in the pattern where compilation failed, or None
  32. lineno: The line number where compilation failed, unless pos is None
  33. colno: The column number where compilation failed, unless pos is None
  34. """
  35. def __init__(self, message, pattern=None, pos=None):
  36. newline = '\n' if isinstance(pattern, str) else b'\n'
  37. self.msg = message
  38. self.pattern = pattern
  39. self.pos = pos
  40. if pattern is not None and pos is not None:
  41. self.lineno = pattern.count(newline, 0, pos) + 1
  42. self.colno = pos - pattern.rfind(newline, 0, pos)
  43. message = "{} at position {}".format(message, pos)
  44. if newline in pattern:
  45. message += " (line {}, column {})".format(self.lineno,
  46. self.colno)
  47. Exception.__init__(self, message)
  48. # The exception for when a positional flag has been turned on in the old
  49. # behaviour.
class _UnscopedFlagSet(Exception):
    """Internal exception: a positional flag was turned on in the old
    (unscoped) behaviour."""
    pass
  52. # The exception for when parsing fails and we want to try something else.
class ParseError(Exception):
    """Internal exception: parsing failed and another interpretation of the
    pattern should be tried."""
    pass
  55. # The exception for when there isn't a valid first set.
class _FirstSetError(Exception):
    """Internal exception: a valid firstset could not be built."""
    pass
  58. # Flags.
  59. class RegexFlag(enum.IntFlag):
  60. A = ASCII = 0x80 # Assume ASCII locale.
  61. B = BESTMATCH = 0x1000 # Best fuzzy match.
  62. D = DEBUG = 0x200 # Print parsed pattern.
  63. E = ENHANCEMATCH = 0x8000 # Attempt to improve the fit after finding the first
  64. # fuzzy match.
  65. F = FULLCASE = 0x4000 # Unicode full case-folding.
  66. I = IGNORECASE = 0x2 # Ignore case.
  67. L = LOCALE = 0x4 # Assume current 8-bit locale.
  68. M = MULTILINE = 0x8 # Make anchors look for newline.
  69. P = POSIX = 0x10000 # POSIX-style matching (leftmost longest).
  70. R = REVERSE = 0x400 # Search backwards.
  71. S = DOTALL = 0x10 # Make dot match newline.
  72. U = UNICODE = 0x20 # Assume Unicode locale.
  73. V0 = VERSION0 = 0x2000 # Old legacy behaviour.
  74. V1 = VERSION1 = 0x100 # New enhanced behaviour.
  75. W = WORD = 0x800 # Default Unicode word breaks.
  76. X = VERBOSE = 0x40 # Ignore whitespace and comments.
  77. T = TEMPLATE = 0x1 # Template (present because re module has it).
  78. def __repr__(self):
  79. if self._name_ is not None:
  80. return 'regex.%s' % self._name_
  81. value = self._value_
  82. members = []
  83. negative = value < 0
  84. if negative:
  85. value = ~value
  86. for m in self.__class__:
  87. if value & m._value_:
  88. value &= ~m._value_
  89. members.append('regex.%s' % m._name_)
  90. if value:
  91. members.append(hex(value))
  92. res = '|'.join(members)
  93. if negative:
  94. if len(members) > 1:
  95. res = '~(%s)' % res
  96. else:
  97. res = '~%s' % res
  98. return res
  99. __str__ = object.__str__
# Make the flag members (A, ASCII, I, IGNORECASE, ...) available as
# module-level names.
globals().update(RegexFlag.__members__)

# The default behaviour is the new, enhanced behaviour.
DEFAULT_VERSION = VERSION1

_ALL_VERSIONS = VERSION0 | VERSION1
_ALL_ENCODINGS = ASCII | LOCALE | UNICODE

# The default flags for the various versions.
DEFAULT_FLAGS = {VERSION0: 0, VERSION1: FULLCASE}

# The mask for the flags: global flags apply to the whole pattern, scoped
# flags can be changed within it.
GLOBAL_FLAGS = (_ALL_VERSIONS | BESTMATCH | DEBUG | ENHANCEMATCH | POSIX |
  REVERSE)
SCOPED_FLAGS = (FULLCASE | IGNORECASE | MULTILINE | DOTALL | WORD | VERBOSE |
  _ALL_ENCODINGS)

# Character classes used by the parser.
ALPHA = frozenset(string.ascii_letters)
DIGITS = frozenset(string.digits)
ALNUM = ALPHA | DIGITS
OCT_DIGITS = frozenset(string.octdigits)
HEX_DIGITS = frozenset(string.hexdigits)
# "" is included so that the end of the pattern is treated like a special
# character (source.get() returns "" at the end of the pattern).
SPECIAL_CHARS = frozenset("()|?*+{^$.[\\#") | frozenset([""])
NAMED_CHAR_PART = ALNUM | frozenset(" -")
PROPERTY_NAME_PART = ALNUM | frozenset(" &_-.")
SET_OPS = ("||", "~~", "&&", "--")

# The width of the code words inside the regex engine.
BYTES_PER_CODE = _regex.get_code_size()
BITS_PER_CODE = BYTES_PER_CODE * 8

# The repeat count which represents infinity.
UNLIMITED = (1 << BITS_PER_CODE) - 1

# The regular expression flags, keyed by their inline-flag letters.
REGEX_FLAGS = {"a": ASCII, "b": BESTMATCH, "e": ENHANCEMATCH, "f": FULLCASE,
  "i": IGNORECASE, "L": LOCALE, "m": MULTILINE, "p": POSIX, "r": REVERSE,
  "s": DOTALL, "u": UNICODE, "V0": VERSION0, "V1": VERSION1, "w": WORD, "x":
  VERBOSE}

# The case flags.
CASE_FLAGS = FULLCASE | IGNORECASE
NOCASE = 0
FULLIGNORECASE = FULLCASE | IGNORECASE

FULL_CASE_FOLDING = UNICODE | FULLIGNORECASE

# FULLCASE on its own (without IGNORECASE) has no effect.
CASE_FLAGS_COMBINATIONS = {0: 0, FULLCASE: 0, IGNORECASE: IGNORECASE,
  FULLIGNORECASE: FULLIGNORECASE}

# The number of digits in hexadecimal escapes.
HEX_ESCAPES = {"x": 2, "u": 4, "U": 8}
  139. # The names of the opcodes.
# The names of the opcodes. The string is split on whitespace and each
# opcode's position in the resulting list becomes its numeric code (see the
# OP namespace built from this below).
OPCODES = """
FAILURE
SUCCESS
ANY
ANY_ALL
ANY_ALL_REV
ANY_REV
ANY_U
ANY_U_REV
ATOMIC
BOUNDARY
BRANCH
CALL_REF
CHARACTER
CHARACTER_IGN
CHARACTER_IGN_REV
CHARACTER_REV
CONDITIONAL
DEFAULT_BOUNDARY
DEFAULT_END_OF_WORD
DEFAULT_START_OF_WORD
END
END_OF_LINE
END_OF_LINE_U
END_OF_STRING
END_OF_STRING_LINE
END_OF_STRING_LINE_U
END_OF_WORD
FUZZY
GRAPHEME_BOUNDARY
GREEDY_REPEAT
GROUP
GROUP_CALL
GROUP_EXISTS
KEEP
LAZY_REPEAT
LOOKAROUND
NEXT
PROPERTY
PROPERTY_IGN
PROPERTY_IGN_REV
PROPERTY_REV
PRUNE
RANGE
RANGE_IGN
RANGE_IGN_REV
RANGE_REV
REF_GROUP
REF_GROUP_FLD
REF_GROUP_FLD_REV
REF_GROUP_IGN
REF_GROUP_IGN_REV
REF_GROUP_REV
SEARCH_ANCHOR
SET_DIFF
SET_DIFF_IGN
SET_DIFF_IGN_REV
SET_DIFF_REV
SET_INTER
SET_INTER_IGN
SET_INTER_IGN_REV
SET_INTER_REV
SET_SYM_DIFF
SET_SYM_DIFF_IGN
SET_SYM_DIFF_IGN_REV
SET_SYM_DIFF_REV
SET_UNION
SET_UNION_IGN
SET_UNION_IGN_REV
SET_UNION_REV
SKIP
START_OF_LINE
START_OF_LINE_U
START_OF_STRING
START_OF_WORD
STRING
STRING_FLD
STRING_FLD_REV
STRING_IGN
STRING_IGN_REV
STRING_REV
FUZZY_EXT
"""
  223. # Define the opcodes in a namespace.
# An empty class used as a simple attribute container.
class Namespace:
    pass

OP = Namespace()
# Each opcode's numeric value is its position within the OPCODES listing,
# e.g. OP.FAILURE == 0, OP.SUCCESS == 1.
for i, op in enumerate(OPCODES.split()):
    setattr(OP, op, i)
def _shrink_cache(cache_dict, args_dict, locale_sensitive, max_length, divisor=5):
    """Make room in the given cache.

    Args:
        cache_dict: The cache dictionary to modify.
        args_dict: The dictionary of named list args used by patterns.
        locale_sensitive: Dictionary keyed by (pattern_type, pattern);
            presumably records whether a pattern is locale-sensitive —
            confirm with the caller.
        max_length: Maximum # of entries in cache_dict before it is shrunk.
        divisor: Cache will shrink to max_length - 1/divisor*max_length items.
    """
    # Toss out a fraction of the entries at random to make room for new ones.
    # A random algorithm was chosen as opposed to simply cache_dict.popitem()
    # as popitem could penalize the same regular expression repeatedly based
    # on its internal hash value. Being random should spread the cache miss
    # love around.
    cache_keys = tuple(cache_dict.keys())
    overage = len(cache_keys) - max_length
    if overage < 0:
        # Cache is already within limits. Normally this should not happen
        # but it could due to multithreading.
        return

    number_to_toss = max_length // divisor + overage

    # The import is done here to avoid a circular dependency.
    import random
    if not hasattr(random, 'sample'):
        # Do nothing while resolving the circular dependency:
        #    re->random->warnings->tokenize->string->re
        return

    for doomed_key in random.sample(cache_keys, number_to_toss):
        try:
            del cache_dict[doomed_key]
        except KeyError:
            # Ignore problems if the cache changed from another thread.
            pass

    # Rebuild the arguments and locale-sensitivity dictionaries from the
    # 6-tuple keys of the surviving cache entries.
    args_dict.clear()
    sensitivity_dict = {}
    for pattern, pattern_type, flags, args, default_version, locale in tuple(cache_dict):
        args_dict[pattern, pattern_type, flags, default_version, locale] = args
        try:
            sensitivity_dict[pattern_type, pattern] = locale_sensitive[pattern_type, pattern]
        except KeyError:
            pass

    locale_sensitive.clear()
    locale_sensitive.update(sensitivity_dict)
  272. def _fold_case(info, string):
  273. "Folds the case of a string."
  274. flags = info.flags
  275. if (flags & _ALL_ENCODINGS) == 0:
  276. flags |= info.guess_encoding
  277. return _regex.fold_case(flags, string)
  278. def is_cased_i(info, char):
  279. "Checks whether a character is cased."
  280. return len(_regex.get_all_cases(info.flags, char)) > 1
  281. def is_cased_f(flags, char):
  282. "Checks whether a character is cased."
  283. return len(_regex.get_all_cases(flags, char)) > 1
  284. def _compile_firstset(info, fs):
  285. "Compiles the firstset for the pattern."
  286. reverse = bool(info.flags & REVERSE)
  287. fs = _check_firstset(info, reverse, fs)
  288. if not fs:
  289. return []
  290. # Compile the firstset.
  291. return fs.compile(reverse)
  292. def _check_firstset(info, reverse, fs):
  293. "Checks the firstset for the pattern."
  294. if not fs or None in fs:
  295. return None
  296. # If we ignore the case, for simplicity we won't build a firstset.
  297. members = set()
  298. case_flags = NOCASE
  299. for i in fs:
  300. if isinstance(i, Character) and not i.positive:
  301. return None
  302. # if i.case_flags:
  303. # if isinstance(i, Character):
  304. # if is_cased_i(info, i.value):
  305. # return []
  306. # elif isinstance(i, SetBase):
  307. # return []
  308. case_flags |= i.case_flags
  309. members.add(i.with_flags(case_flags=NOCASE))
  310. if case_flags == (FULLCASE | IGNORECASE):
  311. return None
  312. # Build the firstset.
  313. fs = SetUnion(info, list(members), case_flags=case_flags & ~FULLCASE,
  314. zerowidth=True)
  315. fs = fs.optimise(info, reverse, in_set=True)
  316. return fs
  317. def _flatten_code(code):
  318. "Flattens the code from a list of tuples."
  319. flat_code = []
  320. for c in code:
  321. flat_code.extend(c)
  322. return flat_code
  323. def make_case_flags(info):
  324. "Makes the case flags."
  325. flags = info.flags & CASE_FLAGS
  326. # Turn off FULLCASE if ASCII is turned on.
  327. if info.flags & ASCII:
  328. flags &= ~FULLCASE
  329. return flags
  330. def make_character(info, value, in_set=False):
  331. "Makes a character literal."
  332. if in_set:
  333. # A character set is built case-sensitively.
  334. return Character(value)
  335. return Character(value, case_flags=make_case_flags(info))
  336. def make_ref_group(info, name, position):
  337. "Makes a group reference."
  338. return RefGroup(info, name, position, case_flags=make_case_flags(info))
  339. def make_string_set(info, name):
  340. "Makes a string set."
  341. return StringSet(info, name, case_flags=make_case_flags(info))
  342. def make_property(info, prop, in_set):
  343. "Makes a property."
  344. if in_set:
  345. return prop
  346. return prop.with_flags(case_flags=make_case_flags(info))
  347. def _parse_pattern(source, info):
  348. "Parses a pattern, eg. 'a|b|c'."
  349. branches = [parse_sequence(source, info)]
  350. while source.match("|"):
  351. branches.append(parse_sequence(source, info))
  352. if len(branches) == 1:
  353. return branches[0]
  354. return Branch(branches)
def parse_sequence(source, info):
    "Parses a sequence, eg. 'abc'."
    # The sequence starts with a None sentinel; apply_quantifier and
    # apply_constraint pop the previous element and use None to detect a
    # quantifier/constraint with nothing before it.
    sequence = [None]
    case_flags = make_case_flags(info)
    while True:
        saved_pos = source.pos
        ch = source.get()
        if ch in SPECIAL_CHARS:
            if ch in ")|":
                # The end of a sequence. At the end of the pattern ch is "".
                source.pos = saved_pos
                break
            elif ch == "\\":
                # An escape sequence outside a set.
                sequence.append(parse_escape(source, info, False))
            elif ch == "(":
                # A parenthesised subpattern or a flag.
                element = parse_paren(source, info)
                if element is None:
                    # No element was produced (it was a flag); the case
                    # flags may have changed, so refresh them.
                    case_flags = make_case_flags(info)
                else:
                    sequence.append(element)
            elif ch == ".":
                # Any character.
                if info.flags & DOTALL:
                    sequence.append(AnyAll())
                elif info.flags & WORD:
                    sequence.append(AnyU())
                else:
                    sequence.append(Any())
            elif ch == "[":
                # A character set.
                sequence.append(parse_set(source, info))
            elif ch == "^":
                # The start of a line or the string.
                if info.flags & MULTILINE:
                    if info.flags & WORD:
                        sequence.append(StartOfLineU())
                    else:
                        sequence.append(StartOfLine())
                else:
                    sequence.append(StartOfString())
            elif ch == "$":
                # The end of a line or the string.
                if info.flags & MULTILINE:
                    if info.flags & WORD:
                        sequence.append(EndOfLineU())
                    else:
                        sequence.append(EndOfLine())
                else:
                    if info.flags & WORD:
                        sequence.append(EndOfStringLineU())
                    else:
                        sequence.append(EndOfStringLine())
            elif ch in "?*+{":
                # Looks like a quantifier.
                counts = parse_quantifier(source, info, ch)
                if counts:
                    # It _is_ a quantifier.
                    apply_quantifier(source, info, counts, case_flags, ch,
                      saved_pos, sequence)
                    # Re-seed the sentinel so that a following quantifier is
                    # reported as a "multiple repeat".
                    sequence.append(None)
                else:
                    # It's not a quantifier. Maybe it's a fuzzy constraint.
                    constraints = parse_fuzzy(source, info, ch, case_flags)
                    if constraints:
                        # It _is_ a fuzzy constraint.
                        apply_constraint(source, info, constraints, case_flags,
                          saved_pos, sequence)
                        sequence.append(None)
                    else:
                        # The element was just a literal.
                        sequence.append(Character(ord(ch),
                          case_flags=case_flags))
            else:
                # A literal.
                sequence.append(Character(ord(ch), case_flags=case_flags))
        else:
            # A literal.
            sequence.append(Character(ord(ch), case_flags=case_flags))

    # Drop the None sentinels before building the sequence.
    sequence = [item for item in sequence if item is not None]
    return Sequence(sequence)
def apply_quantifier(source, info, counts, case_flags, ch, saved_pos,
  sequence):
    """Applies a parsed quantifier to the last element of 'sequence'.

    'counts' is the (min_count, max_count) pair; 'saved_pos' is the position
    of the quantifier character, used for error reporting. Raises error if
    there's nothing to repeat or the previous item was itself a repeat.
    """
    element = sequence.pop()
    if element is None:
        # A None sentinel marks where a previous quantifier/constraint was
        # applied, so a quantifier here would be a repeat of a repeat.
        if sequence:
            raise error("multiple repeat", source.string, saved_pos)
        raise error("nothing to repeat", source.string, saved_pos)
    if isinstance(element, (GreedyRepeat, LazyRepeat, PossessiveRepeat)):
        raise error("multiple repeat", source.string, saved_pos)
    min_count, max_count = counts
    saved_pos = source.pos
    ch = source.get()
    if ch == "?":
        # The "?" suffix that means it's a lazy repeat.
        repeated = LazyRepeat
    elif ch == "+":
        # The "+" suffix that means it's a possessive repeat.
        repeated = PossessiveRepeat
    else:
        # No suffix means that it's a greedy repeat.
        source.pos = saved_pos
        repeated = GreedyRepeat
    # Ignore the quantifier if it applies to a zero-width item or the number of
    # repeats is fixed at 1.
    if not element.is_empty() and (min_count != 1 or max_count != 1):
        element = repeated(element, min_count, max_count)
    sequence.append(element)
  464. def apply_constraint(source, info, constraints, case_flags, saved_pos,
  465. sequence):
  466. element = sequence.pop()
  467. if element is None:
  468. raise error("nothing for fuzzy constraint", source.string, saved_pos)
  469. # If a group is marked as fuzzy then put all of the fuzzy part in the
  470. # group.
  471. if isinstance(element, Group):
  472. element.subpattern = Fuzzy(element.subpattern, constraints)
  473. sequence.append(element)
  474. else:
  475. sequence.append(Fuzzy(element, constraints))
# Quantifier characters mapped to their (min_count, max_count) pairs;
# None means "unbounded".
_QUANTIFIERS = {"?": (0, 1), "*": (0, None), "+": (1, None)}
  477. def parse_quantifier(source, info, ch):
  478. "Parses a quantifier."
  479. q = _QUANTIFIERS.get(ch)
  480. if q:
  481. # It's a quantifier.
  482. return q
  483. if ch == "{":
  484. # Looks like a limited repeated element, eg. 'a{2,3}'.
  485. counts = parse_limited_quantifier(source)
  486. if counts:
  487. return counts
  488. return None
  489. def is_above_limit(count):
  490. "Checks whether a count is above the maximum."
  491. return count is not None and count >= UNLIMITED
def parse_limited_quantifier(source):
    """Parses a limited quantifier such as '{2,3}' (the '{' is already
    consumed). Returns (min_count, max_count) or None, rewinding the source
    if it turns out not to be a quantifier.
    """
    saved_pos = source.pos
    min_count = parse_count(source)
    if source.match(","):
        max_count = parse_count(source)
        # No minimum means 0 and no maximum means unlimited.
        min_count = int(min_count or 0)
        max_count = int(max_count) if max_count else None
    else:
        # No comma: '{n}' means exactly n, but bare '{' isn't a quantifier.
        if not min_count:
            source.pos = saved_pos
            return None
        min_count = max_count = int(min_count)
    if not source.match ("}"):
        # No closing brace, so treat the whole thing as a literal.
        source.pos = saved_pos
        return None
    if is_above_limit(min_count) or is_above_limit(max_count):
        raise error("repeat count too big", source.string, saved_pos)
    if max_count is not None and min_count > max_count:
        raise error("min repeat greater than max repeat", source.string,
          saved_pos)
    return min_count, max_count
def parse_fuzzy(source, info, ch, case_flags):
    """Parses a fuzzy setting such as '{e<=2}', if present.

    Returns the constraints dict, or None (with the source rewound) when
    the braces don't contain a fuzzy setting.
    """
    saved_pos = source.pos
    if ch != "{":
        return None
    constraints = {}
    try:
        parse_fuzzy_item(source, constraints)
        while source.match(","):
            parse_fuzzy_item(source, constraints)
    except ParseError:
        # Not a fuzzy setting after all; rewind and let the caller treat it
        # as a literal.
        source.pos = saved_pos
        return None
    if source.match(":"):
        # An optional test pattern follows the constraints.
        constraints["test"] = parse_fuzzy_test(source, info, case_flags)
    if not source.match("}"):
        raise error("expected }", source.string, source.pos)
    return constraints
  533. def parse_fuzzy_item(source, constraints):
  534. "Parses a fuzzy setting item."
  535. saved_pos = source.pos
  536. try:
  537. parse_cost_constraint(source, constraints)
  538. except ParseError:
  539. source.pos = saved_pos
  540. parse_cost_equation(source, constraints)
def parse_cost_constraint(source, constraints):
    """Parses a cost constraint, storing (min_cost, max_cost) under the
    constraint letter in 'constraints'. Raises ParseError when the input
    isn't a cost constraint.
    """
    saved_pos = source.pos
    ch = source.get()
    if ch in ALPHA:
        # Syntax: constraint [("<=" | "<") cost]
        constraint = parse_constraint(source, constraints, ch)
        max_inc = parse_fuzzy_compare(source)
        if max_inc is None:
            # No maximum cost.
            constraints[constraint] = 0, None
        else:
            # There's a maximum cost.
            cost_pos = source.pos
            max_cost = parse_cost_limit(source)
            # Inclusive or exclusive limit?
            if not max_inc:
                max_cost -= 1
            if max_cost < 0:
                raise error("bad fuzzy cost limit", source.string, cost_pos)
            constraints[constraint] = 0, max_cost
    elif ch in DIGITS:
        # Syntax: cost ("<=" | "<") constraint ("<=" | "<") cost
        source.pos = saved_pos
        # Minimum cost.
        cost_pos = source.pos
        min_cost = parse_cost_limit(source)
        min_inc = parse_fuzzy_compare(source)
        if min_inc is None:
            raise ParseError()
        constraint = parse_constraint(source, constraints, source.get())
        max_inc = parse_fuzzy_compare(source)
        if max_inc is None:
            raise ParseError()
        # Maximum cost.
        cost_pos = source.pos
        max_cost = parse_cost_limit(source)
        # Inclusive or exclusive limits?
        if not min_inc:
            min_cost += 1
        if not max_inc:
            max_cost -= 1
        if not 0 <= min_cost <= max_cost:
            raise error("bad fuzzy cost limit", source.string, cost_pos)
        constraints[constraint] = min_cost, max_cost
    else:
        raise ParseError()
  588. def parse_cost_limit(source):
  589. "Parses a cost limit."
  590. cost_pos = source.pos
  591. digits = parse_count(source)
  592. try:
  593. return int(digits)
  594. except ValueError:
  595. pass
  596. raise error("bad fuzzy cost limit", source.string, cost_pos)
  597. def parse_constraint(source, constraints, ch):
  598. "Parses a constraint."
  599. if ch not in "deis":
  600. raise ParseError()
  601. if ch in constraints:
  602. raise ParseError()
  603. return ch
  604. def parse_fuzzy_compare(source):
  605. "Parses a cost comparator."
  606. if source.match("<="):
  607. return True
  608. elif source.match("<"):
  609. return False
  610. else:
  611. return None
def parse_cost_equation(source, constraints):
    """Parses a cost equation such as '2i+2d+1s<=4' into
    constraints["cost"]. Raises error on a duplicate equation and ParseError
    when the comparator is missing.
    """
    if "cost" in constraints:
        raise error("more than one cost equation", source.string, source.pos)
    cost = {}
    parse_cost_term(source, cost)
    while source.match("+"):
        parse_cost_term(source, cost)
    max_inc = parse_fuzzy_compare(source)
    if max_inc is None:
        raise ParseError()
    max_cost = int(parse_count(source))
    # Inclusive or exclusive limit?
    if not max_inc:
        max_cost -= 1
    if max_cost < 0:
        raise error("bad fuzzy cost limit", source.string, source.pos)
    cost["max"] = max_cost
    constraints["cost"] = cost
  630. def parse_cost_term(source, cost):
  631. "Parses a cost equation term."
  632. coeff = parse_count(source)
  633. ch = source.get()
  634. if ch not in "dis":
  635. raise ParseError()
  636. if ch in cost:
  637. raise error("repeated fuzzy cost", source.string, source.pos)
  638. cost[ch] = int(coeff or 1)
def parse_fuzzy_test(source, info, case_flags):
    """Parses the test pattern of a fuzzy constraint (after ":").

    Accepts an escape sequence, ".", a character set or a literal character;
    anything else raises error.
    """
    saved_pos = source.pos
    ch = source.get()
    if ch in SPECIAL_CHARS:
        if ch == "\\":
            # An escape sequence outside a set.
            return parse_escape(source, info, False)
        elif ch == ".":
            # Any character.
            if info.flags & DOTALL:
                return AnyAll()
            elif info.flags & WORD:
                return AnyU()
            else:
                return Any()
        elif ch == "[":
            # A character set.
            return parse_set(source, info)
        else:
            raise error("expected character set", source.string, saved_pos)
    elif ch:
        # A literal.
        return Character(ord(ch), case_flags=case_flags)
    else:
        # End of the pattern.
        raise error("expected character set", source.string, saved_pos)
def parse_count(source):
    """Parses a quantifier's count, returning a (possibly empty) string of
    decimal digits.
    """
    return source.get_while(DIGITS)
def parse_paren(source, info):
    """Parses a parenthesised subpattern or a flag (the "(" is already
    consumed). Returns the parsed element, or None if it was inline flags.
    """
    saved_pos = source.pos
    ch = source.get(True)
    if ch == "?":
        # (?...
        saved_pos_2 = source.pos
        ch = source.get(True)
        if ch == "<":
            # (?<...
            saved_pos_3 = source.pos
            ch = source.get()
            if ch in ("=", "!"):
                # (?<=... or (?<!...: lookbehind.
                return parse_lookaround(source, info, True, ch == "=")
            # (?<...: a named capture group.
            source.pos = saved_pos_3
            name = parse_name(source)
            group = info.open_group(name)
            source.expect(">")
            saved_flags = info.flags
            try:
                subpattern = _parse_pattern(source, info)
                source.expect(")")
            finally:
                # Scoped flags end with the group.
                info.flags = saved_flags
                source.ignore_space = bool(info.flags & VERBOSE)
            info.close_group()
            return Group(info, group, subpattern)
        if ch in ("=", "!"):
            # (?=... or (?!...: lookahead.
            return parse_lookaround(source, info, False, ch == "=")
        if ch == "P":
            # (?P...: a Python extension.
            return parse_extension(source, info)
        if ch == "#":
            # (?#...: a comment.
            return parse_comment(source)
        if ch == "(":
            # (?(...: a conditional subpattern.
            return parse_conditional(source, info)
        if ch == ">":
            # (?>...: an atomic subpattern.
            return parse_atomic(source, info)
        if ch == "|":
            # (?|...: a common/reset groups branch.
            return parse_common(source, info)
        if ch == "R" or "0" <= ch <= "9":
            # (?R...: probably a call to a group.
            return parse_call_group(source, info, ch, saved_pos_2)
        if ch == "&":
            # (?&...: a call to a named group.
            return parse_call_named_group(source, info, saved_pos_2)
        # (?...: probably a flags subpattern.
        source.pos = saved_pos_2
        return parse_flags_subpattern(source, info)
    if ch == "*":
        # (*...
        saved_pos_2 = source.pos
        word = source.get_while(set(")>"), include=False)
        if word[ : 1].isalpha():
            # (*VERB): a backtracking-control verb.
            verb = VERBS.get(word)
            if not verb:
                raise error("unknown verb", source.string, saved_pos_2)
            source.expect(")")
            return verb
    # (...: an unnamed capture group.
    source.pos = saved_pos
    group = info.open_group()
    saved_flags = info.flags
    try:
        subpattern = _parse_pattern(source, info)
        source.expect(")")
    finally:
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    info.close_group()
    return Group(info, group, subpattern)
def parse_extension(source, info):
    """Parses a Python extension, i.e. what follows "(?P"."""
    saved_pos = source.pos
    ch = source.get()
    if ch == "<":
        # (?P<...: a named capture group.
        name = parse_name(source)
        group = info.open_group(name)
        source.expect(">")
        saved_flags = info.flags
        try:
            subpattern = _parse_pattern(source, info)
            source.expect(")")
        finally:
            # Scoped flags end with the group.
            info.flags = saved_flags
            source.ignore_space = bool(info.flags & VERBOSE)
        info.close_group()
        return Group(info, group, subpattern)
    if ch == "=":
        # (?P=...: a named group reference.
        name = parse_name(source, allow_numeric=True)
        source.expect(")")
        if info.is_open_group(name):
            raise error("cannot refer to an open group", source.string,
              saved_pos)
        return make_ref_group(info, name, saved_pos)
    if ch == ">" or ch == "&":
        # (?P>...: a call to a group.
        return parse_call_named_group(source, info, saved_pos)
    source.pos = saved_pos
    raise error("unknown extension", source.string, saved_pos)
def parse_comment(source):
    """Parses a comment "(?#...)", skipping its contents. Returns None."""
    while True:
        saved_pos = source.pos
        c = source.get(True)
        if not c or c == ")":
            break
        if c == "\\":
            # A backslash escapes the next character, even ")".
            c = source.get(True)
    # Rewind to just before the ")" (or end) so expect() can report it.
    source.pos = saved_pos
    source.expect(")")
    return None
def parse_lookaround(source, info, behind, positive):
    """Parses a lookaround; 'behind' and 'positive' select which of the four
    kinds ((?=, (?!, (?<=, (?<!) it is.
    """
    saved_flags = info.flags
    try:
        subpattern = _parse_pattern(source, info)
        source.expect(")")
    finally:
        # Any flags set inside are scoped to the lookaround.
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    return LookAround(behind, positive, subpattern)
def parse_conditional(source, info):
    """Parses a conditional subpattern "(?(group)yes|no)" or a lookaround
    conditional "(?(?=...)yes|no)".
    """
    saved_flags = info.flags
    saved_pos = source.pos
    ch = source.get()
    if ch == "?":
        # (?(?...
        ch = source.get()
        if ch in ("=", "!"):
            # (?(?=... or (?(?!...: lookahead conditional.
            return parse_lookaround_conditional(source, info, False, ch == "=")
        if ch == "<":
            # (?(?<...
            ch = source.get()
            if ch in ("=", "!"):
                # (?(?<=... or (?(?<!...: lookbehind conditional.
                return parse_lookaround_conditional(source, info, True, ch ==
                  "=")
        source.pos = saved_pos
        raise error("expected lookaround conditional", source.string,
          source.pos)
    # A group conditional; the condition is a group name or number.
    source.pos = saved_pos
    try:
        group = parse_name(source, True)
        source.expect(")")
        yes_branch = parse_sequence(source, info)
        if source.match("|"):
            no_branch = parse_sequence(source, info)
        else:
            no_branch = Sequence()
        source.expect(")")
    finally:
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    if yes_branch.is_empty() and no_branch.is_empty():
        # Both branches are empty, so the conditional is a no-op.
        return Sequence()
    return Conditional(info, group, yes_branch, no_branch, saved_pos)
def parse_lookaround_conditional(source, info, behind, positive):
    """Parses a lookaround conditional, e.g. "(?(?=test)yes|no)"."""
    saved_flags = info.flags
    try:
        subpattern = _parse_pattern(source, info)
        source.expect(")")
    finally:
        # Any flags set inside the lookaround are scoped to it.
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    yes_branch = parse_sequence(source, info)
    if source.match("|"):
        no_branch = parse_sequence(source, info)
    else:
        no_branch = Sequence()
    source.expect(")")
    return LookAroundConditional(behind, positive, subpattern, yes_branch,
      no_branch)
def parse_atomic(source, info):
    """Parses an atomic subpattern "(?>...)"."""
    saved_flags = info.flags
    try:
        subpattern = _parse_pattern(source, info)
        source.expect(")")
    finally:
        # Any flags set inside are scoped to the subpattern.
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    return Atomic(subpattern)
def parse_common(source, info):
    """Parses a common groups branch "(?|...)"."""
    # Capture group numbers in different branches can reuse the group numbers.
    initial_group_count = info.group_count
    branches = [parse_sequence(source, info)]
    final_group_count = info.group_count
    while source.match("|"):
        # Each branch restarts numbering from the same point.
        info.group_count = initial_group_count
        branches.append(parse_sequence(source, info))
        final_group_count = max(final_group_count, info.group_count)
    # The overall count is the maximum over all branches.
    info.group_count = final_group_count
    source.expect(")")
    if len(branches) == 1:
        return branches[0]
    return Branch(branches)
  878. def parse_call_group(source, info, ch, pos):
  879. "Parses a call to a group."
  880. if ch == "R":
  881. group = "0"
  882. else:
  883. group = ch + source.get_while(DIGITS)
  884. source.expect(")")
  885. return CallGroup(info, group, pos)
  886. def parse_call_named_group(source, info, pos):
  887. "Parses a call to a named group."
  888. group = parse_name(source)
  889. source.expect(")")
  890. return CallGroup(info, group, pos)
def parse_flag_set(source):
    """Parses a set of inline flag letters, returning them OR'ed together."""
    flags = 0
    try:
        while True:
            saved_pos = source.pos
            ch = source.get()
            if ch == "V":
                # Version flags are two characters: "V0" or "V1".
                ch += source.get()
            flags |= REGEX_FLAGS[ch]
    except KeyError:
        # The first non-flag character ends the set; rewind past it.
        source.pos = saved_pos
    return flags
def parse_flags(source, info):
    """Parses flags being turned on/off, returning (flags_on, flags_off)."""
    flags_on = parse_flag_set(source)
    if source.match("-"):
        flags_off = parse_flag_set(source)
        if not flags_off:
            raise error("bad inline flags: no flags after '-'", source.string,
              source.pos)
    else:
        flags_off = 0
    if flags_on & LOCALE:
        # Remember that this pattern has an inline locale flag.
        info.inline_locale = True
    return flags_on, flags_off
def parse_subpattern(source, info, flags_on, flags_off):
    """Parses a subpattern with scoped flags, e.g. "(?i:...)"."""
    saved_flags = info.flags
    info.flags = (info.flags | flags_on) & ~flags_off
    source.ignore_space = bool(info.flags & VERBOSE)
    try:
        subpattern = _parse_pattern(source, info)
        source.expect(")")
    finally:
        # The flags apply only inside this subpattern.
        info.flags = saved_flags
        source.ignore_space = bool(info.flags & VERBOSE)
    return subpattern
def parse_flags_subpattern(source, info):
    """Parses a flags subpattern. It could be inline flags or a subpattern
    possibly with local flags. If it's a subpattern, then that's returned;
    if it's inline flags, then None is returned.
    """
    flags_on, flags_off = parse_flags(source, info)
    if flags_off & GLOBAL_FLAGS:
        raise error("bad inline flags: cannot turn off global flag",
          source.string, source.pos)
    if flags_on & flags_off:
        raise error("bad inline flags: flag turned on and off", source.string,
          source.pos)
    # Handle flags which are global in all regex behaviours.
    new_global_flags = (flags_on & ~info.global_flags) & GLOBAL_FLAGS
    if new_global_flags:
        info.global_flags |= new_global_flags
        # A global has been turned on, so reparse the pattern.
        raise _UnscopedFlagSet(info.global_flags)
    # Ensure that from now on we have only scoped flags.
    flags_on &= ~GLOBAL_FLAGS
    if source.match(":"):
        # (?flags:...): a subpattern with scoped flags.
        return parse_subpattern(source, info, flags_on, flags_off)
    if source.match(")"):
        # (?flags): positional inline flags.
        parse_positional_flags(source, info, flags_on, flags_off)
        return None
    raise error("unknown extension", source.string, source.pos)
def parse_positional_flags(source, info, flags_on, flags_off):
    """Parses positional flags, e.g. "(?i)": they apply from here on."""
    info.flags = (info.flags | flags_on) & ~flags_off
    source.ignore_space = bool(info.flags & VERBOSE)
  960. def parse_name(source, allow_numeric=False, allow_group_0=False):
  961. "Parses a name."
  962. name = source.get_while(set(")>"), include=False)
  963. if not name:
  964. raise error("missing group name", source.string, source.pos)
  965. if name.isdigit():
  966. min_group = 0 if allow_group_0 else 1
  967. if not allow_numeric or int(name) < min_group:
  968. raise error("bad character in group name", source.string,
  969. source.pos)
  970. else:
  971. if not name.isidentifier():
  972. raise error("bad character in group name", source.string,
  973. source.pos)
  974. return name
  975. def is_octal(string):
  976. "Checks whether a string is octal."
  977. return all(ch in OCT_DIGITS for ch in string)
  978. def is_decimal(string):
  979. "Checks whether a string is decimal."
  980. return all(ch in DIGITS for ch in string)
  981. def is_hexadecimal(string):
  982. "Checks whether a string is hexadecimal."
  983. return all(ch in HEX_DIGITS for ch in string)
def parse_escape(source, info, in_set):
    """Parses an escape sequence (the backslash is already consumed).

    'in_set' says whether we're inside a character set, which disallows
    positional escapes and group references.
    """
    # The character after a backslash is significant even in verbose mode.
    saved_ignore = source.ignore_space
    source.ignore_space = False
    ch = source.get()
    source.ignore_space = saved_ignore
    if not ch:
        # A backslash at the end of the pattern.
        raise error("bad escape (end of pattern)", source.string, source.pos)
    if ch in HEX_ESCAPES:
        # A hexadecimal escape sequence.
        return parse_hex_escape(source, info, ch, HEX_ESCAPES[ch], in_set, ch)
    elif ch == "g" and not in_set:
        # A group reference.
        saved_pos = source.pos
        try:
            return parse_group_ref(source, info)
        except error:
            # Invalid as a group reference, so assume it's a literal.
            source.pos = saved_pos
        return make_character(info, ord(ch), in_set)
    elif ch == "G" and not in_set:
        # A search anchor.
        return SearchAnchor()
    elif ch == "L" and not in_set:
        # A string set.
        return parse_string_set(source, info)
    elif ch == "N":
        # A named codepoint.
        return parse_named_char(source, info, in_set)
    elif ch in "pP":
        # A Unicode property, positive or negative.
        return parse_property(source, info, ch == "p", in_set)
    elif ch == "R" and not in_set:
        # A line ending.
        charset = [0x0A, 0x0B, 0x0C, 0x0D]
        if info.guess_encoding == UNICODE:
            charset.extend([0x85, 0x2028, 0x2029])
        # "\r\n" is matched atomically so it can't be split by backtracking.
        return Atomic(Branch([String([0x0D, 0x0A]), SetUnion(info, [Character(c)
          for c in charset])]))
    elif ch == "X" and not in_set:
        # A grapheme cluster.
        return Grapheme()
    elif ch in ALPHA:
        # An alphabetic escape sequence.
        # Positional escapes aren't allowed inside a character set.
        if not in_set:
            if info.flags & WORD:
                value = WORD_POSITION_ESCAPES.get(ch)
            else:
                value = POSITION_ESCAPES.get(ch)
            if value:
                return value
        value = CHARSET_ESCAPES.get(ch)
        if value:
            return value
        value = CHARACTER_ESCAPES.get(ch)
        if value:
            return Character(ord(value))
        raise error("bad escape \\%s" % ch, source.string, source.pos)
    elif ch in DIGITS:
        # A numeric escape sequence.
        return parse_numeric_escape(source, info, ch, in_set)
    else:
        # A literal.
        return make_character(info, ord(ch), in_set)
def parse_numeric_escape(source, info, ch, in_set):
    """Parses a numeric escape sequence: an octal escape or a group
    reference, depending on context and the digits.
    """
    if in_set or ch == "0":
        # Octal escape sequence, max 3 digits.
        return parse_octal_escape(source, info, [ch], in_set)
    # At least 1 digit, so either octal escape or group.
    digits = ch
    saved_pos = source.pos
    ch = source.get()
    if ch in DIGITS:
        # At least 2 digits, so either octal escape or group.
        digits += ch
        saved_pos = source.pos
        ch = source.get()
        if is_octal(digits) and ch in OCT_DIGITS:
            # 3 octal digits, so octal escape sequence.
            encoding = info.flags & _ALL_ENCODINGS
            if encoding == ASCII or encoding == LOCALE:
                # Byte-oriented patterns wrap at 0xFF.
                octal_mask = 0xFF
            else:
                octal_mask = 0x1FF
            value = int(digits + ch, 8) & octal_mask
            return make_character(info, value)
    # Group reference.
    source.pos = saved_pos
    if info.is_open_group(digits):
        raise error("cannot refer to an open group", source.string, source.pos)
    return make_ref_group(info, digits, source.pos)
def parse_octal_escape(source, info, digits, in_set):
    """Parses an octal escape sequence; 'digits' holds any digit(s) already
    consumed.
    """
    saved_pos = source.pos
    ch = source.get()
    while len(digits) < 3 and ch in OCT_DIGITS:
        digits.append(ch)
        saved_pos = source.pos
        ch = source.get()
    # Push back the first non-octal character.
    source.pos = saved_pos
    try:
        value = int("".join(digits), 8)
        return make_character(info, value, in_set)
    except ValueError:
        # int() failed, so a non-octal digit such as "8" or "9" was present.
        if digits[0] in OCT_DIGITS:
            raise error("incomplete escape \\%s" % ''.join(digits),
              source.string, source.pos)
        else:
            raise error("bad escape \\%s" % digits[0], source.string,
              source.pos)
def parse_hex_escape(source, info, esc, expected_len, in_set, type):
    """Parses a hex escape sequence of exactly 'expected_len' digits.

    'esc'/'type' are the escape letter (e.g. "x", "u"), used in error
    messages. Raises error on too few digits or a codepoint >= 0x110000.
    """
    saved_pos = source.pos
    digits = []
    for i in range(expected_len):
        ch = source.get()
        if ch not in HEX_DIGITS:
            raise error("incomplete escape \\%s%s" % (type, ''.join(digits)),
              source.string, saved_pos)
        digits.append(ch)
    try:
        value = int("".join(digits), 16)
    except ValueError:
        pass
    else:
        # Only codepoints below the Unicode limit are acceptable.
        if value < 0x110000:
            return make_character(info, value, in_set)
    # Bad hex escape.
    raise error("bad hex escape \\%s%s" % (esc, ''.join(digits)),
      source.string, saved_pos)
  1117. def parse_group_ref(source, info):
  1118. "Parses a group reference."
  1119. source.expect("<")
  1120. saved_pos = source.pos
  1121. name = parse_name(source, True)
  1122. source.expect(">")
  1123. if info.is_open_group(name):
  1124. raise error("cannot refer to an open group", source.string, source.pos)
  1125. return make_ref_group(info, name, saved_pos)
def parse_string_set(source, info):
    """Parses a string set reference of the form \\L<name>."""
    source.expect("<")
    name = parse_name(source, True)
    source.expect(">")
    # The named list must have been supplied as a keyword argument.
    if name is None or name not in info.kwargs:
        raise error("undefined named list", source.string, source.pos)
    return make_string_set(info, name)
def parse_named_char(source, info, in_set):
    """Parses a named character, e.g. \\N{LATIN SMALL LETTER A}."""
    saved_pos = source.pos
    if source.match("{"):
        name = source.get_while(NAMED_CHAR_PART, keep_spaces=True)
        if source.match("}"):
            try:
                value = unicodedata.lookup(name)
                return make_character(info, ord(value), in_set)
            except KeyError:
                raise error("undefined character name", source.string,
                  source.pos)
    # Not a valid \N{...} form, so treat it as a literal "N".
    source.pos = saved_pos
    return make_character(info, ord("N"), in_set)
def parse_property(source, info, positive, in_set):
    """Parses a Unicode property, e.g. \\p{Letter} or the abbreviated \\pL.

    'positive' is False for \\P (negated property).
    """
    saved_pos = source.pos
    ch = source.get()
    if ch == "{":
        # "^" inside the braces also negates the property.
        negate = source.match("^")
        prop_name, name = parse_property_name(source)
        if source.match("}"):
            # It's correctly delimited.
            prop = lookup_property(prop_name, name, positive != negate, source)
            return make_property(info, prop, in_set)
    elif ch and ch in "CLMNPSZ":
        # An abbreviated property, eg \pL.
        prop = lookup_property(None, ch, positive, source)
        return make_property(info, prop, in_set)
    # Not a property, so treat as a literal "p" or "P".
    source.pos = saved_pos
    ch = "p" if positive else "P"
    return make_character(info, ord(ch), in_set)
def parse_property_name(source):
    """Parses a property name, which may be qualified (e.g. "Script=Latin").

    Returns (prop_name, name); prop_name is None for an unqualified name.
    """
    name = source.get_while(PROPERTY_NAME_PART)
    saved_pos = source.pos
    ch = source.get()
    if ch and ch in ":=":
        prop_name = name
        name = source.get_while(ALNUM | set(" &_-./")).strip()
        if name:
            # Name after the ":" or "=", so it's a qualified name.
            saved_pos = source.pos
        else:
            # No name after the ":" or "=", so assume it's an unqualified name.
            prop_name, name = None, prop_name
    else:
        prop_name = None
    # Rewind to just after the name actually consumed.
    source.pos = saved_pos
    return prop_name, name
def parse_set(source, info):
    """Parses a character set (the "[" is already consumed)."""
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
    # Whitespace inside a set is always significant.
    saved_ignore = source.ignore_space
    source.ignore_space = False
    # Negative set?
    negate = source.match("^")
    try:
        if version == VERSION0:
            # The old behaviour has no set operators.
            item = parse_set_imp_union(source, info)
        else:
            item = parse_set_union(source, info)
        if not source.match("]"):
            raise error("missing ]", source.string, source.pos)
    finally:
        source.ignore_space = saved_ignore
    if negate:
        item = item.with_flags(positive=not item.positive)
    item = item.with_flags(case_flags=make_case_flags(info))
    return item
  1205. def parse_set_union(source, info):
  1206. "Parses a set union ([x||y])."
  1207. items = [parse_set_symm_diff(source, info)]
  1208. while source.match("||"):
  1209. items.append(parse_set_symm_diff(source, info))
  1210. if len(items) == 1:
  1211. return items[0]
  1212. return SetUnion(info, items)
  1213. def parse_set_symm_diff(source, info):
  1214. "Parses a set symmetric difference ([x~~y])."
  1215. items = [parse_set_inter(source, info)]
  1216. while source.match("~~"):
  1217. items.append(parse_set_inter(source, info))
  1218. if len(items) == 1:
  1219. return items[0]
  1220. return SetSymDiff(info, items)
  1221. def parse_set_inter(source, info):
  1222. "Parses a set intersection ([x&&y])."
  1223. items = [parse_set_diff(source, info)]
  1224. while source.match("&&"):
  1225. items.append(parse_set_diff(source, info))
  1226. if len(items) == 1:
  1227. return items[0]
  1228. return SetInter(info, items)
  1229. def parse_set_diff(source, info):
  1230. "Parses a set difference ([x--y])."
  1231. items = [parse_set_imp_union(source, info)]
  1232. while source.match("--"):
  1233. items.append(parse_set_imp_union(source, info))
  1234. if len(items) == 1:
  1235. return items[0]
  1236. return SetDiff(info, items)
def parse_set_imp_union(source, info):
    """Parses a set implicit union ([xy])."""
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
    items = [parse_set_member(source, info)]
    while True:
        saved_pos = source.pos
        if source.match("]"):
            # End of the set.
            source.pos = saved_pos
            break
        if version == VERSION1 and any(source.match(op) for op in SET_OPS):
            # The new behaviour has set operators.
            source.pos = saved_pos
            break
        items.append(parse_set_member(source, info))
    if len(items) == 1:
        return items[0]
    return SetUnion(info, items)
def parse_set_member(source, info):
    """Parses a member in a character set: a single item or a range."""
    # Parse a set item.
    start = parse_set_item(source, info)
    saved_pos1 = source.pos
    if (not isinstance(start, Character) or not start.positive or not
      source.match("-")):
        # It's not the start of a range.
        return start
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
    # It looks like the start of a range of characters.
    saved_pos2 = source.pos
    if version == VERSION1 and source.match("-"):
        # It's actually the set difference operator '--', so return the
        # character.
        source.pos = saved_pos1
        return start
    if source.match("]"):
        # We've reached the end of the set, so return both the character and
        # hyphen.
        source.pos = saved_pos2
        return SetUnion(info, [start, Character(ord("-"))])
    # Parse a set item.
    end = parse_set_item(source, info)
    if not isinstance(end, Character) or not end.positive:
        # It's not a range, so return the character, hyphen and property.
        return SetUnion(info, [start, Character(ord("-")), end])
    # It _is_ a range.
    if start.value > end.value:
        raise error("bad character range", source.string, source.pos)
    if start.value == end.value:
        # A single-character "range".
        return start
    return Range(start.value, end.value)
def parse_set_item(source, info):
    """Parses an item in a character set.

    An item is an escape sequence, a POSIX class ([:name:]), a nested set
    (VERSION1 only), or a literal character.
    """
    version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION

    if source.match("\\"):
        # An escape sequence in a set.
        return parse_escape(source, info, True)

    saved_pos = source.pos
    if source.match("[:"):
        # Looks like a POSIX character class.
        try:
            return parse_posix_class(source, info)
        except ParseError:
            # Not a POSIX character class.
            source.pos = saved_pos

    if version == VERSION1 and source.match("["):
        # It's the start of a nested set.

        # Negative set?
        negate = source.match("^")
        item = parse_set_union(source, info)

        if not source.match("]"):
            raise error("missing ]", source.string, source.pos)

        if negate:
            item = item.with_flags(positive=not item.positive)

        return item

    ch = source.get()
    if not ch:
        raise error("unterminated character set", source.string, source.pos)

    # A literal character.
    return Character(ord(ch))
  1316. def parse_posix_class(source, info):
  1317. "Parses a POSIX character class."
  1318. negate = source.match("^")
  1319. prop_name, name = parse_property_name(source)
  1320. if not source.match(":]"):
  1321. raise ParseError()
  1322. return lookup_property(prop_name, name, not negate, source, posix=True)
  1323. def float_to_rational(flt):
  1324. "Converts a float to a rational pair."
  1325. int_part = int(flt)
  1326. error = flt - int_part
  1327. if abs(error) < 0.0001:
  1328. return int_part, 1
  1329. den, num = float_to_rational(1.0 / error)
  1330. return int_part * den + num, den
  1331. def numeric_to_rational(numeric):
  1332. "Converts a numeric string to a rational string, if possible."
  1333. if numeric[ : 1] == "-":
  1334. sign, numeric = numeric[0], numeric[1 : ]
  1335. else:
  1336. sign = ""
  1337. parts = numeric.split("/")
  1338. if len(parts) == 2:
  1339. num, den = float_to_rational(float(parts[0]) / float(parts[1]))
  1340. elif len(parts) == 1:
  1341. num, den = float_to_rational(float(parts[0]))
  1342. else:
  1343. raise ValueError()
  1344. result = "{}{}/{}".format(sign, num, den)
  1345. if result.endswith("/1"):
  1346. return result[ : -2]
  1347. return result
  1348. def standardise_name(name):
  1349. "Standardises a property or value name."
  1350. try:
  1351. return numeric_to_rational("".join(name))
  1352. except (ValueError, ZeroDivisionError):
  1353. return "".join(ch for ch in name if ch not in "_- ").upper()
# Class names that get a "POSIX" prefix when looked up as POSIX classes
# (see lookup_property).
_POSIX_CLASSES = set('ALNUM DIGIT PUNCT XDIGIT'.split())
# The complete value set of a binary (yes/no) property.
_BINARY_VALUES = set('YES Y NO N TRUE T FALSE F'.split())
def lookup_property(property, value, positive, source=None, posix=False):
    """Looks up a property, returning a Property node.

    'property' may be None, in which case 'value' is tried successively as a
    GC/script/block value, a binary property name, and a prefixed
    ("IS"/"IN") property/script/block name. Raises error() if nothing
    matches; position info is included only when 'source' is given.
    """
    # Normalise the names (which may still be lists).
    property = standardise_name(property) if property else None
    value = standardise_name(value)

    if (property, value) == ("GENERALCATEGORY", "ASSIGNED"):
        # "Assigned" is the negation of "Unassigned".
        property, value, positive = "GENERALCATEGORY", "UNASSIGNED", not positive

    if posix and not property and value.upper() in _POSIX_CLASSES:
        value = 'POSIX' + value

    if property:
        # Both the property and the value are provided.
        prop = PROPERTIES.get(property)
        if not prop:
            if not source:
                raise error("unknown property")

            raise error("unknown property", source.string, source.pos)

        prop_id, value_dict = prop
        val_id = value_dict.get(value)
        if val_id is None:
            if not source:
                raise error("unknown property value")

            raise error("unknown property value", source.string, source.pos)

        return Property((prop_id << 16) | val_id, positive)

    # Only the value is provided.
    # It might be the name of a GC, script or block value.
    for property in ("GC", "SCRIPT", "BLOCK"):
        prop_id, value_dict = PROPERTIES.get(property)
        val_id = value_dict.get(value)
        if val_id is not None:
            return Property((prop_id << 16) | val_id, positive)

    # It might be the name of a binary property.
    prop = PROPERTIES.get(value)
    if prop:
        prop_id, value_dict = prop
        if set(value_dict) == _BINARY_VALUES:
            return Property((prop_id << 16) | 1, positive)

        return Property(prop_id << 16, not positive)

    # It might be the name of a binary property starting with a prefix.
    if value.startswith("IS"):
        prop = PROPERTIES.get(value[2 : ])
        if prop:
            prop_id, value_dict = prop
            if "YES" in value_dict:
                return Property((prop_id << 16) | 1, positive)

    # It might be the name of a script or block starting with a prefix.
    for prefix, property in (("IS", "SCRIPT"), ("IN", "BLOCK")):
        if value.startswith(prefix):
            prop_id, value_dict = PROPERTIES.get(property)
            # NOTE(review): slices off 2 chars rather than len(prefix);
            # works because both prefixes here are 2 chars long.
            val_id = value_dict.get(value[2 : ])
            if val_id is not None:
                return Property((prop_id << 16) | val_id, positive)

    # Unknown property.
    if not source:
        raise error("unknown property")

    raise error("unknown property", source.string, source.pos)
def _compile_replacement(source, pattern, is_unicode):
    """Compiles one escape sequence in a replacement template.

    Returns (is_group, chars): is_group is True when the result is a group
    index rather than literal codepoints.
    """
    ch = source.get()
    if ch in ALPHA:
        # An alphabetic escape sequence.
        value = CHARACTER_ESCAPES.get(ch)
        if value:
            return False, [ord(value)]

        if ch in HEX_ESCAPES and (ch == "x" or is_unicode):
            # A hexadecimal escape sequence.
            return False, [parse_repl_hex_escape(source, HEX_ESCAPES[ch], ch)]

        if ch == "g":
            # A group preference.
            return True, [compile_repl_group(source, pattern)]

        if ch == "N" and is_unicode:
            # A named character.
            value = parse_repl_named_char(source)
            if value is not None:
                return False, [value]

        raise error("bad escape \\%s" % ch, source.string, source.pos)

    # Octal values are masked to a byte for bytes templates.
    if isinstance(source.sep, bytes):
        octal_mask = 0xFF
    else:
        octal_mask = 0x1FF

    if ch == "0":
        # An octal escape sequence.
        digits = ch
        while len(digits) < 3:
            saved_pos = source.pos
            ch = source.get()
            if ch not in OCT_DIGITS:
                source.pos = saved_pos
                break
            digits += ch

        return False, [int(digits, 8) & octal_mask]

    if ch in DIGITS:
        # Either an octal escape sequence (3 digits) or a group reference (max
        # 2 digits).
        digits = ch
        saved_pos = source.pos
        ch = source.get()
        if ch in DIGITS:
            digits += ch
            saved_pos = source.pos
            ch = source.get()
            if ch and is_octal(digits + ch):
                # An octal escape sequence.
                return False, [int(digits + ch, 8) & octal_mask]

        # A group reference.
        source.pos = saved_pos
        return True, [int(digits)]

    if ch == "\\":
        # An escaped backslash is a backslash.
        return False, [ord("\\")]

    if not ch:
        # A trailing backslash.
        raise error("bad escape (end of pattern)", source.string, source.pos)

    # An escaped non-backslash is a backslash followed by the literal.
    return False, [ord("\\"), ord(ch)]
  1470. def parse_repl_hex_escape(source, expected_len, type):
  1471. "Parses a hex escape sequence in a replacement string."
  1472. digits = []
  1473. for i in range(expected_len):
  1474. ch = source.get()
  1475. if ch not in HEX_DIGITS:
  1476. raise error("incomplete escape \\%s%s" % (type, ''.join(digits)),
  1477. source.string, source.pos)
  1478. digits.append(ch)
  1479. return int("".join(digits), 16)
  1480. def parse_repl_named_char(source):
  1481. "Parses a named character in a replacement string."
  1482. saved_pos = source.pos
  1483. if source.match("{"):
  1484. name = source.get_while(ALPHA | set(" "))
  1485. if source.match("}"):
  1486. try:
  1487. value = unicodedata.lookup(name)
  1488. return ord(value)
  1489. except KeyError:
  1490. raise error("undefined character name", source.string,
  1491. source.pos)
  1492. source.pos = saved_pos
  1493. return None
  1494. def compile_repl_group(source, pattern):
  1495. "Compiles a replacement template group reference."
  1496. source.expect("<")
  1497. name = parse_name(source, True, True)
  1498. source.expect(">")
  1499. if name.isdigit():
  1500. index = int(name)
  1501. if not 0 <= index <= pattern.groups:
  1502. raise error("invalid group reference", source.string, source.pos)
  1503. return index
  1504. try:
  1505. return pattern.groupindex[name]
  1506. except KeyError:
  1507. raise IndexError("unknown group")
# The regular expression is parsed into a syntax tree. The different types of
# node are defined below.

# Indentation unit used by the nodes' dump() methods.
INDENT = " "

# Flag bits ORed into the flags of compiled opcodes.
POSITIVE_OP = 0x1
ZEROWIDTH_OP = 0x2
FUZZY_OP = 0x4
REVERSE_OP = 0x8
REQUIRED_OP = 0x10

# Text fragments used by the nodes' dump() methods.
POS_TEXT = {False: "NON-MATCH", True: "MATCH"}
CASE_TEXT = {NOCASE: "", IGNORECASE: " SIMPLE_IGNORE_CASE", FULLCASE: "",
  FULLIGNORECASE: " FULL_IGNORE_CASE"}
  1519. def make_sequence(items):
  1520. if len(items) == 1:
  1521. return items[0]
  1522. return Sequence(items)
# Common base class for all nodes.
class RegexBase:
    "Base class of all nodes of the parsed-pattern tree."

    def __init__(self):
        # _key identifies the node for __hash__/__eq__; subclasses extend it
        # with their distinguishing attributes.
        self._key = self.__class__

    def with_flags(self, positive=None, case_flags=None, zerowidth=None):
        "Returns a copy with the given flags changed (self if unchanged)."
        if positive is None:
            positive = self.positive
        else:
            positive = bool(positive)
        if case_flags is None:
            case_flags = self.case_flags
        else:
            case_flags = CASE_FLAGS_COMBINATIONS[case_flags & CASE_FLAGS]
        if zerowidth is None:
            zerowidth = self.zerowidth
        else:
            zerowidth = bool(zerowidth)

        if (positive == self.positive and case_flags == self.case_flags and
          zerowidth == self.zerowidth):
            # Nothing changed, so reuse this node.
            return self

        return self.rebuild(positive, case_flags, zerowidth)

    def fix_groups(self, pattern, reverse, fuzzy):
        # Default: nothing to resolve.
        pass

    def optimise(self, info, reverse):
        return self

    def pack_characters(self, info):
        return self

    def remove_captures(self):
        return self

    def is_atomic(self):
        return True

    def can_be_affix(self):
        return True

    def contains_group(self):
        return False

    def get_firstset(self, reverse):
        # By default a node can't report its possible first characters.
        raise _FirstSetError()

    def has_simple_start(self):
        return False

    def compile(self, reverse=False, fuzzy=False):
        return self._compile(reverse, fuzzy)

    def is_empty(self):
        return False

    def __hash__(self):
        return hash(self._key)

    def __eq__(self, other):
        return type(self) is type(other) and self._key == other._key

    def __ne__(self, other):
        return not self.__eq__(other)

    def get_required_string(self, reverse):
        # Default: no required string, just the width to skip.
        return self.max_width(), None
# Base class for zero-width nodes.
class ZeroWidthBase(RegexBase):
    def __init__(self, positive=True):
        RegexBase.__init__(self)
        self.positive = bool(positive)

        self._key = self.__class__, self.positive

    def get_firstset(self, reverse):
        # Consumes no characters, hence the None member.
        return set([None])

    def _compile(self, reverse, fuzzy):
        flags = 0
        if self.positive:
            flags |= POSITIVE_OP
        if fuzzy:
            flags |= FUZZY_OP
        if reverse:
            flags |= REVERSE_OP
        return [(self._opcode, flags)]

    def dump(self, indent, reverse):
        print("{}{} {}".format(INDENT * indent, self._op_name,
          POS_TEXT[self.positive]))

    def max_width(self):
        return 0
  1596. class Any(RegexBase):
  1597. _opcode = {False: OP.ANY, True: OP.ANY_REV}
  1598. _op_name = "ANY"
  1599. def has_simple_start(self):
  1600. return True
  1601. def _compile(self, reverse, fuzzy):
  1602. flags = 0
  1603. if fuzzy:
  1604. flags |= FUZZY_OP
  1605. return [(self._opcode[reverse], flags)]
  1606. def dump(self, indent, reverse):
  1607. print("{}{}".format(INDENT * indent, self._op_name))
  1608. def max_width(self):
  1609. return 1
# Variant of Any compiled to the ANY_ALL opcodes.
class AnyAll(Any):
    _opcode = {False: OP.ANY_ALL, True: OP.ANY_ALL_REV}
    _op_name = "ANY_ALL"
# Variant of Any compiled to the ANY_U opcodes.
class AnyU(Any):
    _opcode = {False: OP.ANY_U, True: OP.ANY_U_REV}
    _op_name = "ANY_U"
class Atomic(RegexBase):
    "A node wrapping a subpattern between OP.ATOMIC and OP.END."

    def __init__(self, subpattern):
        RegexBase.__init__(self)
        self.subpattern = subpattern

    def fix_groups(self, pattern, reverse, fuzzy):
        self.subpattern.fix_groups(pattern, reverse, fuzzy)

    def optimise(self, info, reverse):
        self.subpattern = self.subpattern.optimise(info, reverse)

        if self.subpattern.is_empty():
            # An empty atomic group is just its (empty) contents.
            return self.subpattern
        return self

    def pack_characters(self, info):
        self.subpattern = self.subpattern.pack_characters(info)
        return self

    def remove_captures(self):
        self.subpattern = self.subpattern.remove_captures()
        return self

    def can_be_affix(self):
        return self.subpattern.can_be_affix()

    def contains_group(self):
        return self.subpattern.contains_group()

    def get_firstset(self, reverse):
        return self.subpattern.get_firstset(reverse)

    def has_simple_start(self):
        return self.subpattern.has_simple_start()

    def _compile(self, reverse, fuzzy):
        return ([(OP.ATOMIC, )] + self.subpattern.compile(reverse, fuzzy) +
          [(OP.END, )])

    def dump(self, indent, reverse):
        print("{}ATOMIC".format(INDENT * indent))
        self.subpattern.dump(indent + 1, reverse)

    def is_empty(self):
        return self.subpattern.is_empty()

    def __eq__(self, other):
        return (type(self) is type(other) and self.subpattern ==
          other.subpattern)

    def max_width(self):
        return self.subpattern.max_width()

    def get_required_string(self, reverse):
        return self.subpattern.get_required_string(reverse)
# Zero-width node compiled to OP.BOUNDARY.
class Boundary(ZeroWidthBase):
    _opcode = OP.BOUNDARY
    _op_name = "BOUNDARY"
  1659. class Branch(RegexBase):
  1660. def __init__(self, branches):
  1661. RegexBase.__init__(self)
  1662. self.branches = branches
  1663. def fix_groups(self, pattern, reverse, fuzzy):
  1664. for b in self.branches:
  1665. b.fix_groups(pattern, reverse, fuzzy)
  1666. def optimise(self, info, reverse):
  1667. if not self.branches:
  1668. return Sequence([])
  1669. # Flatten branches within branches.
  1670. branches = Branch._flatten_branches(info, reverse, self.branches)
  1671. # Move any common prefix or suffix out of the branches.
  1672. if reverse:
  1673. suffix, branches = Branch._split_common_suffix(info, branches)
  1674. prefix = []
  1675. else:
  1676. prefix, branches = Branch._split_common_prefix(info, branches)
  1677. suffix = []
  1678. # Try to reduce adjacent single-character branches to sets.
  1679. branches = Branch._reduce_to_set(info, reverse, branches)
  1680. if len(branches) > 1:
  1681. sequence = [Branch(branches)]
  1682. if not prefix or not suffix:
  1683. # We might be able to add a quick precheck before the branches.
  1684. firstset = self._add_precheck(info, reverse, branches)
  1685. if firstset:
  1686. if reverse:
  1687. sequence.append(firstset)
  1688. else:
  1689. sequence.insert(0, firstset)
  1690. else:
  1691. sequence = branches
  1692. return make_sequence(prefix + sequence + suffix)
  1693. def _add_precheck(self, info, reverse, branches):
  1694. charset = set()
  1695. pos = -1 if reverse else 0
  1696. for branch in branches:
  1697. if type(branch) is Literal and branch.case_flags == NOCASE:
  1698. charset.add(branch.characters[pos])
  1699. else:
  1700. return
  1701. if not charset:
  1702. return None
  1703. return _check_firstset(info, reverse, [Character(c) for c in charset])
  1704. def pack_characters(self, info):
  1705. self.branches = [b.pack_characters(info) for b in self.branches]
  1706. return self
  1707. def remove_captures(self):
  1708. self.branches = [b.remove_captures() for b in self.branches]
  1709. return self
  1710. def is_atomic(self):
  1711. return all(b.is_atomic() for b in self.branches)
  1712. def can_be_affix(self):
  1713. return all(b.can_be_affix() for b in self.branches)
  1714. def contains_group(self):
  1715. return any(b.contains_group() for b in self.branches)
  1716. def get_firstset(self, reverse):
  1717. fs = set()
  1718. for b in self.branches:
  1719. fs |= b.get_firstset(reverse)
  1720. return fs or set([None])
  1721. def _compile(self, reverse, fuzzy):
  1722. if not self.branches:
  1723. return []
  1724. code = [(OP.BRANCH, )]
  1725. for b in self.branches:
  1726. code.extend(b.compile(reverse, fuzzy))
  1727. code.append((OP.NEXT, ))
  1728. code[-1] = (OP.END, )
  1729. return code
  1730. def dump(self, indent, reverse):
  1731. print("{}BRANCH".format(INDENT * indent))
  1732. self.branches[0].dump(indent + 1, reverse)
  1733. for b in self.branches[1 : ]:
  1734. print("{}OR".format(INDENT * indent))
  1735. b.dump(indent + 1, reverse)
  1736. @staticmethod
  1737. def _flatten_branches(info, reverse, branches):
  1738. # Flatten the branches so that there aren't branches of branches.
  1739. new_branches = []
  1740. for b in branches:
  1741. b = b.optimise(info, reverse)
  1742. if isinstance(b, Branch):
  1743. new_branches.extend(b.branches)
  1744. else:
  1745. new_branches.append(b)
  1746. return new_branches
  1747. @staticmethod
  1748. def _split_common_prefix(info, branches):
  1749. # Common leading items can be moved out of the branches.
  1750. # Get the items in the branches.
  1751. alternatives = []
  1752. for b in branches:
  1753. if isinstance(b, Sequence):
  1754. alternatives.append(b.items)
  1755. else:
  1756. alternatives.append([b])
  1757. # What is the maximum possible length of the prefix?
  1758. max_count = min(len(a) for a in alternatives)
  1759. # What is the longest common prefix?
  1760. prefix = alternatives[0]
  1761. pos = 0
  1762. end_pos = max_count
  1763. while pos < end_pos and prefix[pos].can_be_affix() and all(a[pos] ==
  1764. prefix[pos] for a in alternatives):
  1765. pos += 1
  1766. count = pos
  1767. if info.flags & UNICODE:
  1768. # We need to check that we're not splitting a sequence of
  1769. # characters which could form part of full case-folding.
  1770. count = pos
  1771. while count > 0 and not all(Branch._can_split(a, count) for a in
  1772. alternatives):
  1773. count -= 1
  1774. # No common prefix is possible.
  1775. if count == 0:
  1776. return [], branches
  1777. # Rebuild the branches.
  1778. new_branches = []
  1779. for a in alternatives:
  1780. new_branches.append(make_sequence(a[count : ]))
  1781. return prefix[ : count], new_branches
  1782. @staticmethod
  1783. def _split_common_suffix(info, branches):
  1784. # Common trailing items can be moved out of the branches.
  1785. # Get the items in the branches.
  1786. alternatives = []
  1787. for b in branches:
  1788. if isinstance(b, Sequence):
  1789. alternatives.append(b.items)
  1790. else:
  1791. alternatives.append([b])
  1792. # What is the maximum possible length of the suffix?
  1793. max_count = min(len(a) for a in alternatives)
  1794. # What is the longest common suffix?
  1795. suffix = alternatives[0]
  1796. pos = -1
  1797. end_pos = -1 - max_count
  1798. while pos > end_pos and suffix[pos].can_be_affix() and all(a[pos] ==
  1799. suffix[pos] for a in alternatives):
  1800. pos -= 1
  1801. count = -1 - pos
  1802. if info.flags & UNICODE:
  1803. # We need to check that we're not splitting a sequence of
  1804. # characters which could form part of full case-folding.
  1805. while count > 0 and not all(Branch._can_split_rev(a, count) for a
  1806. in alternatives):
  1807. count -= 1
  1808. # No common suffix is possible.
  1809. if count == 0:
  1810. return [], branches
  1811. # Rebuild the branches.
  1812. new_branches = []
  1813. for a in alternatives:
  1814. new_branches.append(make_sequence(a[ : -count]))
  1815. return suffix[-count : ], new_branches
  1816. @staticmethod
  1817. def _can_split(items, count):
  1818. # Check the characters either side of the proposed split.
  1819. if not Branch._is_full_case(items, count - 1):
  1820. return True
  1821. if not Branch._is_full_case(items, count):
  1822. return True
  1823. # Check whether a 1-1 split would be OK.
  1824. if Branch._is_folded(items[count - 1 : count + 1]):
  1825. return False
  1826. # Check whether a 1-2 split would be OK.
  1827. if (Branch._is_full_case(items, count + 2) and
  1828. Branch._is_folded(items[count - 1 : count + 2])):
  1829. return False
  1830. # Check whether a 2-1 split would be OK.
  1831. if (Branch._is_full_case(items, count - 2) and
  1832. Branch._is_folded(items[count - 2 : count + 1])):
  1833. return False
  1834. return True
  1835. @staticmethod
  1836. def _can_split_rev(items, count):
  1837. end = len(items)
  1838. # Check the characters either side of the proposed split.
  1839. if not Branch._is_full_case(items, end - count):
  1840. return True
  1841. if not Branch._is_full_case(items, end - count - 1):
  1842. return True
  1843. # Check whether a 1-1 split would be OK.
  1844. if Branch._is_folded(items[end - count - 1 : end - count + 1]):
  1845. return False
  1846. # Check whether a 1-2 split would be OK.
  1847. if (Branch._is_full_case(items, end - count + 2) and
  1848. Branch._is_folded(items[end - count - 1 : end - count + 2])):
  1849. return False
  1850. # Check whether a 2-1 split would be OK.
  1851. if (Branch._is_full_case(items, end - count - 2) and
  1852. Branch._is_folded(items[end - count - 2 : end - count + 1])):
  1853. return False
  1854. return True
  1855. @staticmethod
  1856. def _merge_common_prefixes(info, reverse, branches):
  1857. # Branches with the same case-sensitive character prefix can be grouped
  1858. # together if they are separated only by other branches with a
  1859. # character prefix.
  1860. prefixed = defaultdict(list)
  1861. order = {}
  1862. new_branches = []
  1863. for b in branches:
  1864. if Branch._is_simple_character(b):
  1865. # Branch starts with a simple character.
  1866. prefixed[b.value].append([b])
  1867. order.setdefault(b.value, len(order))
  1868. elif (isinstance(b, Sequence) and b.items and
  1869. Branch._is_simple_character(b.items[0])):
  1870. # Branch starts with a simple character.
  1871. prefixed[b.items[0].value].append(b.items)
  1872. order.setdefault(b.items[0].value, len(order))
  1873. else:
  1874. Branch._flush_char_prefix(info, reverse, prefixed, order,
  1875. new_branches)
  1876. new_branches.append(b)
  1877. Branch._flush_char_prefix(info, prefixed, order, new_branches)
  1878. return new_branches
  1879. @staticmethod
  1880. def _is_simple_character(c):
  1881. return isinstance(c, Character) and c.positive and not c.case_flags
  1882. @staticmethod
  1883. def _reduce_to_set(info, reverse, branches):
  1884. # Can the branches be reduced to a set?
  1885. new_branches = []
  1886. items = set()
  1887. case_flags = NOCASE
  1888. for b in branches:
  1889. if isinstance(b, (Character, Property, SetBase)):
  1890. # Branch starts with a single character.
  1891. if b.case_flags != case_flags:
  1892. # Different case sensitivity, so flush.
  1893. Branch._flush_set_members(info, reverse, items, case_flags,
  1894. new_branches)
  1895. case_flags = b.case_flags
  1896. items.add(b.with_flags(case_flags=NOCASE))
  1897. else:
  1898. Branch._flush_set_members(info, reverse, items, case_flags,
  1899. new_branches)
  1900. new_branches.append(b)
  1901. Branch._flush_set_members(info, reverse, items, case_flags,
  1902. new_branches)
  1903. return new_branches
  1904. @staticmethod
  1905. def _flush_char_prefix(info, reverse, prefixed, order, new_branches):
  1906. # Flush the prefixed branches.
  1907. if not prefixed:
  1908. return
  1909. for value, branches in sorted(prefixed.items(), key=lambda pair:
  1910. order[pair[0]]):
  1911. if len(branches) == 1:
  1912. new_branches.append(make_sequence(branches[0]))
  1913. else:
  1914. subbranches = []
  1915. optional = False
  1916. for b in branches:
  1917. if len(b) > 1:
  1918. subbranches.append(make_sequence(b[1 : ]))
  1919. elif not optional:
  1920. subbranches.append(Sequence())
  1921. optional = True
  1922. sequence = Sequence([Character(value), Branch(subbranches)])
  1923. new_branches.append(sequence.optimise(info, reverse))
  1924. prefixed.clear()
  1925. order.clear()
  1926. @staticmethod
  1927. def _flush_set_members(info, reverse, items, case_flags, new_branches):
  1928. # Flush the set members.
  1929. if not items:
  1930. return
  1931. if len(items) == 1:
  1932. item = list(items)[0]
  1933. else:
  1934. item = SetUnion(info, list(items)).optimise(info, reverse)
  1935. new_branches.append(item.with_flags(case_flags=case_flags))
  1936. items.clear()
  1937. @staticmethod
  1938. def _is_full_case(items, i):
  1939. if not 0 <= i < len(items):
  1940. return False
  1941. item = items[i]
  1942. return (isinstance(item, Character) and item.positive and
  1943. (item.case_flags & FULLIGNORECASE) == FULLIGNORECASE)
  1944. @staticmethod
  1945. def _is_folded(items):
  1946. if len(items) < 2:
  1947. return False
  1948. for i in items:
  1949. if (not isinstance(i, Character) or not i.positive or not
  1950. i.case_flags):
  1951. return False
  1952. folded = "".join(chr(i.value) for i in items)
  1953. folded = _regex.fold_case(FULL_CASE_FOLDING, folded)
  1954. # Get the characters which expand to multiple codepoints on folding.
  1955. expanding_chars = _regex.get_expand_on_folding()
  1956. for c in expanding_chars:
  1957. if folded == _regex.fold_case(FULL_CASE_FOLDING, c):
  1958. return True
  1959. return False
  1960. def is_empty(self):
  1961. return all(b.is_empty() for b in self.branches)
  1962. def __eq__(self, other):
  1963. return type(self) is type(other) and self.branches == other.branches
  1964. def max_width(self):
  1965. return max(b.max_width() for b in self.branches)
  1966. class CallGroup(RegexBase):
  1967. def __init__(self, info, group, position):
  1968. RegexBase.__init__(self)
  1969. self.info = info
  1970. self.group = group
  1971. self.position = position
  1972. self._key = self.__class__, self.group
  1973. def fix_groups(self, pattern, reverse, fuzzy):
  1974. try:
  1975. self.group = int(self.group)
  1976. except ValueError:
  1977. try:
  1978. self.group = self.info.group_index[self.group]
  1979. except KeyError:
  1980. raise error("invalid group reference", pattern, self.position)
  1981. if not 0 <= self.group <= self.info.group_count:
  1982. raise error("unknown group", pattern, self.position)
  1983. if self.group > 0 and self.info.open_group_count[self.group] > 1:
  1984. raise error("ambiguous group reference", pattern, self.position)
  1985. self.info.group_calls.append((self, reverse, fuzzy))
  1986. self._key = self.__class__, self.group
  1987. def remove_captures(self):
  1988. raise error("group reference not allowed", pattern, self.position)
  1989. def _compile(self, reverse, fuzzy):
  1990. return [(OP.GROUP_CALL, self.call_ref)]
  1991. def dump(self, indent, reverse):
  1992. print("{}GROUP_CALL {}".format(INDENT * indent, self.group))
  1993. def __eq__(self, other):
  1994. return type(self) is type(other) and self.group == other.group
  1995. def max_width(self):
  1996. return UNLIMITED
  1997. def __del__(self):
  1998. self.info = None
class CallRef(RegexBase):
    "A node wrapping a subpattern that is the target of a group call."

    def __init__(self, ref, parsed):
        # NOTE(review): RegexBase.__init__ isn't called here, so self._key is
        # never set; hashing/comparing a CallRef would fail. Confirm whether
        # that can ever happen before changing it.
        self.ref = ref
        self.parsed = parsed

    def _compile(self, reverse, fuzzy):
        return ([(OP.CALL_REF, self.ref)] + self.parsed._compile(reverse,
          fuzzy) + [(OP.END, )])
class Character(RegexBase):
    "A node matching a single character, possibly negated or case-folded."

    # Opcode keyed by (case_flags, reverse).
    _opcode = {(NOCASE, False): OP.CHARACTER, (IGNORECASE, False):
      OP.CHARACTER_IGN, (FULLCASE, False): OP.CHARACTER, (FULLIGNORECASE,
      False): OP.CHARACTER_IGN, (NOCASE, True): OP.CHARACTER_REV, (IGNORECASE,
      True): OP.CHARACTER_IGN_REV, (FULLCASE, True): OP.CHARACTER_REV,
      (FULLIGNORECASE, True): OP.CHARACTER_IGN_REV}

    def __init__(self, value, positive=True, case_flags=NOCASE,
      zerowidth=False):
        RegexBase.__init__(self)
        self.value = value
        self.positive = bool(positive)
        self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
        self.zerowidth = bool(zerowidth)

        if (self.positive and (self.case_flags & FULLIGNORECASE) ==
          FULLIGNORECASE):
            # The full case-folded form, which may be several characters.
            self.folded = _regex.fold_case(FULL_CASE_FOLDING, chr(self.value))
        else:
            self.folded = chr(self.value)

        self._key = (self.__class__, self.value, self.positive,
          self.case_flags, self.zerowidth)

    def rebuild(self, positive, case_flags, zerowidth):
        return Character(self.value, positive, case_flags, zerowidth)

    def optimise(self, info, reverse, in_set=False):
        return self

    def get_firstset(self, reverse):
        return set([self])

    def has_simple_start(self):
        return True

    def _compile(self, reverse, fuzzy):
        flags = 0
        if self.positive:
            flags |= POSITIVE_OP
        if self.zerowidth:
            flags |= ZEROWIDTH_OP
        if fuzzy:
            flags |= FUZZY_OP

        code = PrecompiledCode([self._opcode[self.case_flags, reverse], flags,
          self.value])

        if len(self.folded) > 1:
            # The character expands on full case-folding.
            code = Branch([code, String([ord(c) for c in self.folded],
              case_flags=self.case_flags)])

        return code.compile(reverse, fuzzy)

    def dump(self, indent, reverse):
        display = ascii(chr(self.value)).lstrip("bu")
        print("{}CHARACTER {} {}{}".format(INDENT * indent,
          POS_TEXT[self.positive], display, CASE_TEXT[self.case_flags]))

    def matches(self, ch):
        return (ch == self.value) == self.positive

    def max_width(self):
        return len(self.folded)

    def get_required_string(self, reverse):
        if not self.positive:
            return 1, None

        self.folded_characters = tuple(ord(c) for c in self.folded)

        return 0, self
class Conditional(RegexBase):
    """A conditional subpattern: matches yes_item if the referenced group
    has matched, otherwise no_item."""
    def __init__(self, info, group, yes_item, no_item, position):
        RegexBase.__init__(self)
        self.info = info
        self.group = group
        self.yes_item = yes_item
        self.no_item = no_item
        # Position in the pattern, kept for error reporting.
        self.position = position
    def fix_groups(self, pattern, reverse, fuzzy):
        # Resolve the group reference, which may be a number or a name.
        try:
            self.group = int(self.group)
        except ValueError:
            try:
                self.group = self.info.group_index[self.group]
            except KeyError:
                if self.group == 'DEFINE':
                    # 'DEFINE' is a special name unless there's a group with
                    # that name.
                    self.group = 0
                else:
                    raise error("unknown group", pattern, self.position)
        if not 0 <= self.group <= self.info.group_count:
            raise error("invalid group reference", pattern, self.position)
        self.yes_item.fix_groups(pattern, reverse, fuzzy)
        self.no_item.fix_groups(pattern, reverse, fuzzy)
    def optimise(self, info, reverse):
        yes_item = self.yes_item.optimise(info, reverse)
        no_item = self.no_item.optimise(info, reverse)
        return Conditional(info, self.group, yes_item, no_item, self.position)
    def pack_characters(self, info):
        self.yes_item = self.yes_item.pack_characters(info)
        self.no_item = self.no_item.pack_characters(info)
        return self
    def remove_captures(self):
        self.yes_item = self.yes_item.remove_captures()
        self.no_item = self.no_item.remove_captures()
    def is_atomic(self):
        return self.yes_item.is_atomic() and self.no_item.is_atomic()
    def can_be_affix(self):
        return self.yes_item.can_be_affix() and self.no_item.can_be_affix()
    def contains_group(self):
        return self.yes_item.contains_group() or self.no_item.contains_group()
    def get_firstset(self, reverse):
        # Either branch may start the match.
        return (self.yes_item.get_firstset(reverse) |
          self.no_item.get_firstset(reverse))
    def _compile(self, reverse, fuzzy):
        # GROUP_EXISTS <group> <yes> [NEXT <no>] END; the NEXT branch is
        # emitted only when the no_item produces any code.
        code = [(OP.GROUP_EXISTS, self.group)]
        code.extend(self.yes_item.compile(reverse, fuzzy))
        add_code = self.no_item.compile(reverse, fuzzy)
        if add_code:
            code.append((OP.NEXT, ))
            code.extend(add_code)
        code.append((OP.END, ))
        return code
    def dump(self, indent, reverse):
        print("{}GROUP_EXISTS {}".format(INDENT * indent, self.group))
        self.yes_item.dump(indent + 1, reverse)
        if not self.no_item.is_empty():
            print("{}OR".format(INDENT * indent))
            self.no_item.dump(indent + 1, reverse)
    def is_empty(self):
        return self.yes_item.is_empty() and self.no_item.is_empty()
    def __eq__(self, other):
        return type(self) is type(other) and (self.group, self.yes_item,
          self.no_item) == (other.group, other.yes_item, other.no_item)
    def max_width(self):
        return max(self.yes_item.max_width(), self.no_item.max_width())
    def __del__(self):
        # Drop the back-reference to the pattern info to break the cycle.
        self.info = None
class DefaultBoundary(ZeroWidthBase):
    # Zero-width assertion compiled to the DEFAULT_BOUNDARY opcode.
    _opcode = OP.DEFAULT_BOUNDARY
    _op_name = "DEFAULT_BOUNDARY"
class DefaultEndOfWord(ZeroWidthBase):
    # Zero-width assertion compiled to the DEFAULT_END_OF_WORD opcode.
    _opcode = OP.DEFAULT_END_OF_WORD
    _op_name = "DEFAULT_END_OF_WORD"
class DefaultStartOfWord(ZeroWidthBase):
    # Zero-width assertion compiled to the DEFAULT_START_OF_WORD opcode.
    _opcode = OP.DEFAULT_START_OF_WORD
    _op_name = "DEFAULT_START_OF_WORD"
class EndOfLine(ZeroWidthBase):
    # Zero-width assertion compiled to the END_OF_LINE opcode.
    _opcode = OP.END_OF_LINE
    _op_name = "END_OF_LINE"
class EndOfLineU(EndOfLine):
    # Unicode-line-break variant of EndOfLine.
    _opcode = OP.END_OF_LINE_U
    _op_name = "END_OF_LINE_U"
class EndOfString(ZeroWidthBase):
    # Zero-width assertion compiled to the END_OF_STRING opcode.
    _opcode = OP.END_OF_STRING
    _op_name = "END_OF_STRING"
class EndOfStringLine(ZeroWidthBase):
    # Zero-width assertion compiled to the END_OF_STRING_LINE opcode.
    _opcode = OP.END_OF_STRING_LINE
    _op_name = "END_OF_STRING_LINE"
class EndOfStringLineU(EndOfStringLine):
    # Unicode-line-break variant of EndOfStringLine.
    _opcode = OP.END_OF_STRING_LINE_U
    _op_name = "END_OF_STRING_LINE_U"
class EndOfWord(ZeroWidthBase):
    # Zero-width assertion compiled to the END_OF_WORD opcode.
    _opcode = OP.END_OF_WORD
    _op_name = "END_OF_WORD"
class Failure(ZeroWidthBase):
    # Compiled to the FAILURE opcode; presumably the (*FAIL) verb, which
    # never matches — confirm against the engine's opcode semantics.
    _op_name = "FAILURE"
    def _compile(self, reverse, fuzzy):
        return [(OP.FAILURE, )]
class Fuzzy(RegexBase):
    """A fuzzy-matching region around a subpattern.

    `constraints` maps the error types "d" (deletion), "i" (insertion),
    "s" (substitution) and "e" (any error) to (min, max) pairs, where a max
    of None means unlimited; "cost" maps error types to their coefficients
    in the cost equation plus a "max" total.
    """
    def __init__(self, subpattern, constraints=None):
        RegexBase.__init__(self)
        if constraints is None:
            constraints = {}
        self.subpattern = subpattern
        self.constraints = constraints
        # If an error type is mentioned in the cost equation, then its maximum
        # defaults to unlimited.
        if "cost" in constraints:
            for e in "dis":
                if e in constraints["cost"]:
                    constraints.setdefault(e, (0, None))
        # If any error type is mentioned, then all the error maxima default to
        # 0, otherwise they default to unlimited.
        if set(constraints) & set("dis"):
            for e in "dis":
                constraints.setdefault(e, (0, 0))
        else:
            for e in "dis":
                constraints.setdefault(e, (0, None))
        # The maximum of the generic error type defaults to unlimited.
        constraints.setdefault("e", (0, None))
        # The cost equation defaults to equal costs. Also, the cost of any
        # error type not mentioned in the cost equation defaults to 0.
        if "cost" in constraints:
            for e in "dis":
                constraints["cost"].setdefault(e, 0)
        else:
            constraints["cost"] = {"d": 1, "i": 1, "s": 1, "max":
              constraints["e"][1]}
    def fix_groups(self, pattern, reverse, fuzzy):
        # Everything inside a fuzzy region is itself compiled as fuzzy.
        self.subpattern.fix_groups(pattern, reverse, True)
    def pack_characters(self, info):
        self.subpattern = self.subpattern.pack_characters(info)
        return self
    def remove_captures(self):
        self.subpattern = self.subpattern.remove_captures()
        return self
    def is_atomic(self):
        return self.subpattern.is_atomic()
    def contains_group(self):
        return self.subpattern.contains_group()
    def _compile(self, reverse, fuzzy):
        # The individual limits.
        arguments = []
        for e in "dise":
            v = self.constraints[e]
            arguments.append(v[0])
            arguments.append(UNLIMITED if v[1] is None else v[1])
        # The coeffs of the cost equation.
        for e in "dis":
            arguments.append(self.constraints["cost"][e])
        # The maximum of the cost equation.
        v = self.constraints["cost"]["max"]
        arguments.append(UNLIMITED if v is None else v)
        flags = 0
        if reverse:
            flags |= REVERSE_OP
        # A "test" constraint selects the extended FUZZY_EXT form.
        test = self.constraints.get("test")
        if test:
            return ([(OP.FUZZY_EXT, flags) + tuple(arguments)] +
              test.compile(reverse, True) + [(OP.NEXT,)] +
              self.subpattern.compile(reverse, True) + [(OP.END,)])
        return ([(OP.FUZZY, flags) + tuple(arguments)] +
          self.subpattern.compile(reverse, True) + [(OP.END,)])
    def dump(self, indent, reverse):
        constraints = self._constraints_to_string()
        if constraints:
            constraints = " " + constraints
        print("{}FUZZY{}".format(INDENT * indent, constraints))
        self.subpattern.dump(indent + 1, reverse)
    def is_empty(self):
        return self.subpattern.is_empty()
    def __eq__(self, other):
        return (type(self) is type(other) and self.subpattern ==
          other.subpattern and self.constraints == other.constraints)
    def max_width(self):
        return UNLIMITED
    def _constraints_to_string(self):
        # Rebuild a human-readable form of the constraints for dump().
        constraints = []
        for name in "ids":
            min, max = self.constraints[name]
            if max == 0:
                continue
            con = ""
            if min > 0:
                con = "{}<=".format(min)
            con += name
            if max is not None:
                con += "<={}".format(max)
            constraints.append(con)
        cost = []
        for name in "ids":
            coeff = self.constraints["cost"][name]
            if coeff > 0:
                cost.append("{}{}".format(coeff, name))
        limit = self.constraints["cost"]["max"]
        if limit is not None and limit > 0:
            cost = "{}<={}".format("+".join(cost), limit)
            constraints.append(cost)
        return ",".join(constraints)
class Grapheme(RegexBase):
    """Matches a grapheme cluster: one or more characters up to a grapheme
    boundary."""
    def _compile(self, reverse, fuzzy):
        # Match at least 1 character until a grapheme boundary is reached. Note
        # that this is the same whether matching forwards or backwards.
        grapheme_matcher = Atomic(Sequence([LazyRepeat(AnyAll(), 1, None),
          GraphemeBoundary()]))
        return grapheme_matcher.compile(reverse, fuzzy)
    def dump(self, indent, reverse):
        print("{}GRAPHEME".format(INDENT * indent))
    def max_width(self):
        return UNLIMITED
class GraphemeBoundary:
    # Helper used by Grapheme; not a RegexBase subclass, so it only needs
    # compile().
    def compile(self, reverse, fuzzy):
        return [(OP.GRAPHEME_BOUNDARY, 1)]
class GreedyRepeat(RegexBase):
    """A greedy repeat of a subpattern; max_count of None means unbounded."""
    _opcode = OP.GREEDY_REPEAT
    _op_name = "GREEDY_REPEAT"
    def __init__(self, subpattern, min_count, max_count):
        RegexBase.__init__(self)
        self.subpattern = subpattern
        self.min_count = min_count
        self.max_count = max_count
    def fix_groups(self, pattern, reverse, fuzzy):
        self.subpattern.fix_groups(pattern, reverse, fuzzy)
    def optimise(self, info, reverse):
        subpattern = self.subpattern.optimise(info, reverse)
        # type(self) preserves the subclass (lazy/possessive) on rebuild.
        return type(self)(subpattern, self.min_count, self.max_count)
    def pack_characters(self, info):
        self.subpattern = self.subpattern.pack_characters(info)
        return self
    def remove_captures(self):
        self.subpattern = self.subpattern.remove_captures()
        return self
    def is_atomic(self):
        # Only a fixed-count repeat of an atomic subpattern is atomic.
        return self.min_count == self.max_count and self.subpattern.is_atomic()
    def can_be_affix(self):
        return False
    def contains_group(self):
        return self.subpattern.contains_group()
    def get_firstset(self, reverse):
        fs = self.subpattern.get_firstset(reverse)
        if self.min_count == 0:
            # The repeat may match nothing at all.
            fs.add(None)
        return fs
    def _compile(self, reverse, fuzzy):
        repeat = [self._opcode, self.min_count]
        if self.max_count is None:
            repeat.append(UNLIMITED)
        else:
            repeat.append(self.max_count)
        subpattern = self.subpattern.compile(reverse, fuzzy)
        if not subpattern:
            return []
        return ([tuple(repeat)] + subpattern + [(OP.END, )])
    def dump(self, indent, reverse):
        if self.max_count is None:
            limit = "INF"
        else:
            limit = self.max_count
        print("{}{} {} {}".format(INDENT * indent, self._op_name,
          self.min_count, limit))
        self.subpattern.dump(indent + 1, reverse)
    def is_empty(self):
        return self.subpattern.is_empty()
    def __eq__(self, other):
        return type(self) is type(other) and (self.subpattern, self.min_count,
          self.max_count) == (other.subpattern, other.min_count,
          other.max_count)
    def max_width(self):
        if self.max_count is None:
            return UNLIMITED
        return self.subpattern.max_width() * self.max_count
    def get_required_string(self, reverse):
        max_count = UNLIMITED if self.max_count is None else self.max_count
        if self.min_count == 0:
            # The repeat is optional, so nothing is required; report only the
            # width that may be skipped.
            w = self.subpattern.max_width() * max_count
            return min(w, UNLIMITED), None
        ofs, req = self.subpattern.get_required_string(reverse)
        if req:
            return ofs, req
        w = self.subpattern.max_width() * max_count
        return min(w, UNLIMITED), None
  2346. class PossessiveRepeat(GreedyRepeat):
  2347. def is_atomic(self):
  2348. return True
  2349. def _compile(self, reverse, fuzzy):
  2350. subpattern = self.subpattern.compile(reverse, fuzzy)
  2351. if not subpattern:
  2352. return []
  2353. repeat = [self._opcode, self.min_count]
  2354. if self.max_count is None:
  2355. repeat.append(UNLIMITED)
  2356. else:
  2357. repeat.append(self.max_count)
  2358. return ([(OP.ATOMIC, ), tuple(repeat)] + subpattern + [(OP.END, ),
  2359. (OP.END, )])
  2360. def dump(self, indent, reverse):
  2361. print("{}ATOMIC".format(INDENT * indent))
  2362. if self.max_count is None:
  2363. limit = "INF"
  2364. else:
  2365. limit = self.max_count
  2366. print("{}{} {} {}".format(INDENT * (indent + 1), self._op_name,
  2367. self.min_count, limit))
  2368. self.subpattern.dump(indent + 2, reverse)
  2369. class Group(RegexBase):
  2370. def __init__(self, info, group, subpattern):
  2371. RegexBase.__init__(self)
  2372. self.info = info
  2373. self.group = group
  2374. self.subpattern = subpattern
  2375. self.call_ref = None
  2376. def fix_groups(self, pattern, reverse, fuzzy):
  2377. self.info.defined_groups[self.group] = (self, reverse, fuzzy)
  2378. self.subpattern.fix_groups(pattern, reverse, fuzzy)
  2379. def optimise(self, info, reverse):
  2380. subpattern = self.subpattern.optimise(info, reverse)
  2381. return Group(self.info, self.group, subpattern)
  2382. def pack_characters(self, info):
  2383. self.subpattern = self.subpattern.pack_characters(info)
  2384. return self
  2385. def remove_captures(self):
  2386. return self.subpattern.remove_captures()
  2387. def is_atomic(self):
  2388. return self.subpattern.is_atomic()
  2389. def can_be_affix(self):
  2390. return False
  2391. def contains_group(self):
  2392. return True
  2393. def get_firstset(self, reverse):
  2394. return self.subpattern.get_firstset(reverse)
  2395. def has_simple_start(self):
  2396. return self.subpattern.has_simple_start()
  2397. def _compile(self, reverse, fuzzy):
  2398. code = []
  2399. public_group = private_group = self.group
  2400. if private_group < 0:
  2401. public_group = self.info.private_groups[private_group]
  2402. private_group = self.info.group_count - private_group
  2403. key = self.group, reverse, fuzzy
  2404. ref = self.info.call_refs.get(key)
  2405. if ref is not None:
  2406. code += [(OP.CALL_REF, ref)]
  2407. code += [(OP.GROUP, int(not reverse), private_group, public_group)]
  2408. code += self.subpattern.compile(reverse, fuzzy)
  2409. code += [(OP.END, )]
  2410. if ref is not None:
  2411. code += [(OP.END, )]
  2412. return code
  2413. def dump(self, indent, reverse):
  2414. group = self.group
  2415. if group < 0:
  2416. group = private_groups[group]
  2417. print("{}GROUP {}".format(INDENT * indent, group))
  2418. self.subpattern.dump(indent + 1, reverse)
  2419. def __eq__(self, other):
  2420. return (type(self) is type(other) and (self.group, self.subpattern) ==
  2421. (other.group, other.subpattern))
  2422. def max_width(self):
  2423. return self.subpattern.max_width()
  2424. def get_required_string(self, reverse):
  2425. return self.subpattern.get_required_string(reverse)
  2426. def __del__(self):
  2427. self.info = None
class Keep(ZeroWidthBase):
    # Zero-width assertion compiled to the KEEP opcode (\K).
    _opcode = OP.KEEP
    _op_name = "KEEP"
class LazyRepeat(GreedyRepeat):
    # Same structure as GreedyRepeat, but compiled with the lazy opcode.
    _opcode = OP.LAZY_REPEAT
    _op_name = "LAZY_REPEAT"
  2434. class LookAround(RegexBase):
  2435. _dir_text = {False: "AHEAD", True: "BEHIND"}
  2436. def __init__(self, behind, positive, subpattern):
  2437. RegexBase.__init__(self)
  2438. self.behind = bool(behind)
  2439. self.positive = bool(positive)
  2440. self.subpattern = subpattern
  2441. def fix_groups(self, pattern, reverse, fuzzy):
  2442. self.subpattern.fix_groups(pattern, self.behind, fuzzy)
  2443. def optimise(self, info, reverse):
  2444. subpattern = self.subpattern.optimise(info, self.behind)
  2445. if self.positive and subpattern.is_empty():
  2446. return subpattern
  2447. return LookAround(self.behind, self.positive, subpattern)
  2448. def pack_characters(self, info):
  2449. self.subpattern = self.subpattern.pack_characters(info)
  2450. return self
  2451. def remove_captures(self):
  2452. return self.subpattern.remove_captures()
  2453. def is_atomic(self):
  2454. return self.subpattern.is_atomic()
  2455. def can_be_affix(self):
  2456. return self.subpattern.can_be_affix()
  2457. def contains_group(self):
  2458. return self.subpattern.contains_group()
  2459. def get_firstset(self, reverse):
  2460. if self.positive and self.behind == reverse:
  2461. return self.subpattern.get_firstset(reverse)
  2462. return set([None])
  2463. def _compile(self, reverse, fuzzy):
  2464. flags = 0
  2465. if self.positive:
  2466. flags |= POSITIVE_OP
  2467. if fuzzy:
  2468. flags |= FUZZY_OP
  2469. if reverse:
  2470. flags |= REVERSE_OP
  2471. return ([(OP.LOOKAROUND, flags, int(not self.behind))] +
  2472. self.subpattern.compile(self.behind) + [(OP.END, )])
  2473. def dump(self, indent, reverse):
  2474. print("{}LOOK{} {}".format(INDENT * indent,
  2475. self._dir_text[self.behind], POS_TEXT[self.positive]))
  2476. self.subpattern.dump(indent + 1, self.behind)
  2477. def is_empty(self):
  2478. return self.positive and self.subpattern.is_empty()
  2479. def __eq__(self, other):
  2480. return type(self) is type(other) and (self.behind, self.positive,
  2481. self.subpattern) == (other.behind, other.positive, other.subpattern)
  2482. def max_width(self):
  2483. return 0
class LookAroundConditional(RegexBase):
    """A conditional whose test is a lookaround assertion."""
    _dir_text = {False: "AHEAD", True: "BEHIND"}
    def __init__(self, behind, positive, subpattern, yes_item, no_item):
        RegexBase.__init__(self)
        self.behind = bool(behind)
        self.positive = bool(positive)
        self.subpattern = subpattern
        self.yes_item = yes_item
        self.no_item = no_item
    def fix_groups(self, pattern, reverse, fuzzy):
        self.subpattern.fix_groups(pattern, reverse, fuzzy)
        self.yes_item.fix_groups(pattern, reverse, fuzzy)
        self.no_item.fix_groups(pattern, reverse, fuzzy)
    def optimise(self, info, reverse):
        subpattern = self.subpattern.optimise(info, self.behind)
        yes_item = self.yes_item.optimise(info, self.behind)
        no_item = self.no_item.optimise(info, self.behind)
        return LookAroundConditional(self.behind, self.positive, subpattern,
          yes_item, no_item)
    def pack_characters(self, info):
        self.subpattern = self.subpattern.pack_characters(info)
        self.yes_item = self.yes_item.pack_characters(info)
        self.no_item = self.no_item.pack_characters(info)
        return self
    def remove_captures(self):
        self.subpattern = self.subpattern.remove_captures()
        self.yes_item = self.yes_item.remove_captures()
        self.no_item = self.no_item.remove_captures()
    def is_atomic(self):
        return (self.subpattern.is_atomic() and self.yes_item.is_atomic() and
          self.no_item.is_atomic())
    def can_be_affix(self):
        return (self.subpattern.can_be_affix() and self.yes_item.can_be_affix()
          and self.no_item.can_be_affix())
    def contains_group(self):
        return (self.subpattern.contains_group() or
          self.yes_item.contains_group() or self.no_item.contains_group())
    def _compile(self, reverse, fuzzy):
        # CONDITIONAL <test> NEXT <yes> [NEXT <no>] END; the test is
        # compiled in the lookaround's own direction.
        code = [(OP.CONDITIONAL, int(self.positive), int(not self.behind))]
        code.extend(self.subpattern.compile(self.behind, fuzzy))
        code.append((OP.NEXT, ))
        code.extend(self.yes_item.compile(reverse, fuzzy))
        add_code = self.no_item.compile(reverse, fuzzy)
        if add_code:
            code.append((OP.NEXT, ))
            code.extend(add_code)
        code.append((OP.END, ))
        return code
    def dump(self, indent, reverse):
        print("{}CONDITIONAL {} {}".format(INDENT * indent,
          self._dir_text[self.behind], POS_TEXT[self.positive]))
        self.subpattern.dump(indent + 1, self.behind)
        print("{}EITHER".format(INDENT * indent))
        self.yes_item.dump(indent + 1, reverse)
        if not self.no_item.is_empty():
            print("{}OR".format(INDENT * indent))
            self.no_item.dump(indent + 1, reverse)
    def is_empty(self):
        # NOTE(review): 'and' binds tighter than 'or', so this reads as
        # (subpattern and yes_item) or no_item — an empty no_item alone makes
        # the whole node "empty". Possibly 'and' was intended for the last
        # term as well; confirm before changing.
        return (self.subpattern.is_empty() and self.yes_item.is_empty() or
          self.no_item.is_empty())
    def __eq__(self, other):
        return type(self) is type(other) and (self.subpattern, self.yes_item,
          self.no_item) == (other.subpattern, other.yes_item, other.no_item)
    def max_width(self):
        return max(self.yes_item.max_width(), self.no_item.max_width())
    def get_required_string(self, reverse):
        return self.max_width(), None
class PrecompiledCode(RegexBase):
    # Wraps a pre-built list of code values so it can sit in the parse tree.
    # NOTE(review): RegexBase.__init__ is not called here, so _key is never
    # set — confirm this node is never used where _key is required.
    def __init__(self, code):
        self.code = code
    def _compile(self, reverse, fuzzy):
        # The stored values are emitted as-is; direction and fuzziness were
        # decided when the code was built.
        return [tuple(self.code)]
class Property(RegexBase):
    """A Unicode property test. The property is encoded in the upper 16
    bits of `value` and the property's value in the lower 16 bits (see
    dump)."""
    _opcode = {(NOCASE, False): OP.PROPERTY, (IGNORECASE, False):
      OP.PROPERTY_IGN, (FULLCASE, False): OP.PROPERTY, (FULLIGNORECASE, False):
      OP.PROPERTY_IGN, (NOCASE, True): OP.PROPERTY_REV, (IGNORECASE, True):
      OP.PROPERTY_IGN_REV, (FULLCASE, True): OP.PROPERTY_REV, (FULLIGNORECASE,
      True): OP.PROPERTY_IGN_REV}
    def __init__(self, value, positive=True, case_flags=NOCASE,
      zerowidth=False):
        RegexBase.__init__(self)
        self.value = value
        self.positive = bool(positive)
        self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
        self.zerowidth = bool(zerowidth)
        self._key = (self.__class__, self.value, self.positive,
          self.case_flags, self.zerowidth)
    def rebuild(self, positive, case_flags, zerowidth):
        return Property(self.value, positive, case_flags, zerowidth)
    def optimise(self, info, reverse, in_set=False):
        return self
    def get_firstset(self, reverse):
        return set([self])
    def has_simple_start(self):
        return True
    def _compile(self, reverse, fuzzy):
        flags = 0
        if self.positive:
            flags |= POSITIVE_OP
        if self.zerowidth:
            flags |= ZEROWIDTH_OP
        if fuzzy:
            flags |= FUZZY_OP
        return [(self._opcode[self.case_flags, reverse], flags, self.value)]
    def dump(self, indent, reverse):
        # Split the encoded value into property name and property value.
        prop = PROPERTY_NAMES[self.value >> 16]
        name, value = prop[0], prop[1][self.value & 0xFFFF]
        print("{}PROPERTY {} {}:{}{}".format(INDENT * indent,
          POS_TEXT[self.positive], name, value, CASE_TEXT[self.case_flags]))
    def matches(self, ch):
        return _regex.has_property_value(self.value, ch) == self.positive
    def max_width(self):
        return 1
class Prune(ZeroWidthBase):
    # Compiled to the PRUNE opcode; presumably the (*PRUNE) backtracking
    # control verb — confirm against the engine's opcode semantics.
    _op_name = "PRUNE"
    def _compile(self, reverse, fuzzy):
        return [(OP.PRUNE, )]
class Range(RegexBase):
    """An inclusive codepoint range (lower..upper)."""
    _opcode = {(NOCASE, False): OP.RANGE, (IGNORECASE, False): OP.RANGE_IGN,
      (FULLCASE, False): OP.RANGE, (FULLIGNORECASE, False): OP.RANGE_IGN,
      (NOCASE, True): OP.RANGE_REV, (IGNORECASE, True): OP.RANGE_IGN_REV,
      (FULLCASE, True): OP.RANGE_REV, (FULLIGNORECASE, True): OP.RANGE_IGN_REV}
    _op_name = "RANGE"
    def __init__(self, lower, upper, positive=True, case_flags=NOCASE,
      zerowidth=False):
        RegexBase.__init__(self)
        self.lower = lower
        self.upper = upper
        self.positive = bool(positive)
        self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
        self.zerowidth = bool(zerowidth)
        self._key = (self.__class__, self.lower, self.upper, self.positive,
          self.case_flags, self.zerowidth)
    def rebuild(self, positive, case_flags, zerowidth):
        return Range(self.lower, self.upper, positive, case_flags, zerowidth)
    def optimise(self, info, reverse, in_set=False):
        # Is the range case-sensitive?
        if not self.positive or not (self.case_flags & IGNORECASE) or in_set:
            return self
        # Is full case-folding possible?
        if (not (info.flags & UNICODE) or (self.case_flags & FULLIGNORECASE) !=
          FULLIGNORECASE):
            return self
        # Get the characters which expand to multiple codepoints on folding.
        expanding_chars = _regex.get_expand_on_folding()
        # Get the folded characters in the range.
        items = []
        for ch in expanding_chars:
            if self.lower <= ord(ch) <= self.upper:
                folded = _regex.fold_case(FULL_CASE_FOLDING, ch)
                items.append(String([ord(c) for c in folded],
                  case_flags=self.case_flags))
        if not items:
            # We can fall back to simple case-folding.
            return self
        if len(items) < self.upper - self.lower + 1:
            # Not all the characters are covered by the full case-folding.
            items.insert(0, self)
        return Branch(items)
    def _compile(self, reverse, fuzzy):
        flags = 0
        if self.positive:
            flags |= POSITIVE_OP
        if self.zerowidth:
            flags |= ZEROWIDTH_OP
        if fuzzy:
            flags |= FUZZY_OP
        return [(self._opcode[self.case_flags, reverse], flags, self.lower,
          self.upper)]
    def dump(self, indent, reverse):
        display_lower = ascii(chr(self.lower)).lstrip("bu")
        display_upper = ascii(chr(self.upper)).lstrip("bu")
        print("{}RANGE {} {} {}{}".format(INDENT * indent,
          POS_TEXT[self.positive], display_lower, display_upper,
          CASE_TEXT[self.case_flags]))
    def matches(self, ch):
        return (self.lower <= ch <= self.upper) == self.positive
    def max_width(self):
        return 1
  2663. class RefGroup(RegexBase):
  2664. _opcode = {(NOCASE, False): OP.REF_GROUP, (IGNORECASE, False):
  2665. OP.REF_GROUP_IGN, (FULLCASE, False): OP.REF_GROUP, (FULLIGNORECASE,
  2666. False): OP.REF_GROUP_FLD, (NOCASE, True): OP.REF_GROUP_REV, (IGNORECASE,
  2667. True): OP.REF_GROUP_IGN_REV, (FULLCASE, True): OP.REF_GROUP_REV,
  2668. (FULLIGNORECASE, True): OP.REF_GROUP_FLD_REV}
  2669. def __init__(self, info, group, position, case_flags=NOCASE):
  2670. RegexBase.__init__(self)
  2671. self.info = info
  2672. self.group = group
  2673. self.position = position
  2674. self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
  2675. self._key = self.__class__, self.group, self.case_flags
  2676. def fix_groups(self, pattern, reverse, fuzzy):
  2677. try:
  2678. self.group = int(self.group)
  2679. except ValueError:
  2680. try:
  2681. self.group = self.info.group_index[self.group]
  2682. except KeyError:
  2683. raise error("unknown group", pattern, self.position)
  2684. if not 1 <= self.group <= self.info.group_count:
  2685. raise error("invalid group reference", pattern, self.position)
  2686. self._key = self.__class__, self.group, self.case_flags
  2687. def remove_captures(self):
  2688. raise error("group reference not allowed", pattern, self.position)
  2689. def _compile(self, reverse, fuzzy):
  2690. flags = 0
  2691. if fuzzy:
  2692. flags |= FUZZY_OP
  2693. return [(self._opcode[self.case_flags, reverse], flags, self.group)]
  2694. def dump(self, indent, reverse):
  2695. print("{}REF_GROUP {}{}".format(INDENT * indent, self.group,
  2696. CASE_TEXT[self.case_flags]))
  2697. def max_width(self):
  2698. return UNLIMITED
  2699. def __del__(self):
  2700. self.info = None
class SearchAnchor(ZeroWidthBase):
    # Zero-width assertion compiled to the SEARCH_ANCHOR opcode.
    _opcode = OP.SEARCH_ANCHOR
    _op_name = "SEARCH_ANCHOR"
class Sequence(RegexBase):
    """A sequence of regex nodes matched one after another."""
    def __init__(self, items=None):
        RegexBase.__init__(self)
        if items is None:
            items = []
        self.items = items
    def fix_groups(self, pattern, reverse, fuzzy):
        for s in self.items:
            s.fix_groups(pattern, reverse, fuzzy)
    def optimise(self, info, reverse):
        # Flatten the sequences.
        items = []
        for s in self.items:
            s = s.optimise(info, reverse)
            if isinstance(s, Sequence):
                items.extend(s.items)
            else:
                items.append(s)
        return make_sequence(items)
    def pack_characters(self, info):
        "Packs sequences of characters into strings."
        items = []
        # Codepoints of the current run, all sharing `case_flags`.
        characters = []
        case_flags = NOCASE
        for s in self.items:
            if type(s) is Character and s.positive and not s.zerowidth:
                if s.case_flags != case_flags:
                    # Different case sensitivity, so flush, unless neither the
                    # previous nor the new character are cased.
                    if s.case_flags or is_cased_i(info, s.value):
                        Sequence._flush_characters(info, characters,
                          case_flags, items)
                        case_flags = s.case_flags
                characters.append(s.value)
            elif type(s) is String or type(s) is Literal:
                if s.case_flags != case_flags:
                    # Different case sensitivity, so flush, unless neither
                    # the previous nor the new string are cased.
                    if s.case_flags or any(is_cased_i(info, c) for c in
                      characters):
                        Sequence._flush_characters(info, characters,
                          case_flags, items)
                        case_flags = s.case_flags
                characters.extend(s.characters)
            else:
                # Not a packable node: flush the run, then pack the node
                # itself recursively.
                Sequence._flush_characters(info, characters, case_flags, items)
                items.append(s.pack_characters(info))
        Sequence._flush_characters(info, characters, case_flags, items)
        return make_sequence(items)
    def remove_captures(self):
        self.items = [s.remove_captures() for s in self.items]
        return self
    def is_atomic(self):
        return all(s.is_atomic() for s in self.items)
    def can_be_affix(self):
        return False
    def contains_group(self):
        return any(s.contains_group() for s in self.items)
    def get_firstset(self, reverse):
        fs = set()
        items = self.items
        if reverse:
            # NOTE(review): this reverses self.items in place (items is an
            # alias, not a copy) — confirm that mutating the sequence here
            # is intended; _compile below uses a copying slice instead.
            items.reverse()
        for s in items:
            fs |= s.get_firstset(reverse)
            # Stop once an item cannot match empty (no None in its firstset).
            if None not in fs:
                return fs
            fs.discard(None)
        return fs | set([None])
    def has_simple_start(self):
        return bool(self.items) and self.items[0].has_simple_start()
    def _compile(self, reverse, fuzzy):
        seq = self.items
        if reverse:
            seq = seq[::-1]
        code = []
        for s in seq:
            code.extend(s.compile(reverse, fuzzy))
        return code
    def dump(self, indent, reverse):
        for s in self.items:
            s.dump(indent, reverse)
    @staticmethod
    def _flush_characters(info, characters, case_flags, items):
        # Turn the accumulated codepoints into Character/String/Literal
        # nodes and append them to `items`, emptying `characters`.
        if not characters:
            return
        # Disregard case_flags if all of the characters are case-less.
        if case_flags & IGNORECASE:
            if not any(is_cased_i(info, c) for c in characters):
                case_flags = NOCASE
        if (case_flags & FULLIGNORECASE) == FULLIGNORECASE:
            literals = Sequence._fix_full_casefold(characters)
            for item in literals:
                chars = item.characters
                if len(chars) == 1:
                    items.append(Character(chars[0], case_flags=item.case_flags))
                else:
                    items.append(String(chars, case_flags=item.case_flags))
        else:
            if len(characters) == 1:
                items.append(Character(characters[0], case_flags=case_flags))
            else:
                items.append(String(characters, case_flags=case_flags))
        characters[:] = []
    @staticmethod
    def _fix_full_casefold(characters):
        # Split a literal needing full case-folding into chunks that need it
        # and chunks that can use simple case-folding, which is faster.
        expanded = [_regex.fold_case(FULL_CASE_FOLDING, c) for c in
          _regex.get_expand_on_folding()]
        string = _regex.fold_case(FULL_CASE_FOLDING, ''.join(chr(c)
          for c in characters)).lower()
        chunks = []
        for e in expanded:
            found = string.find(e)
            while found >= 0:
                chunks.append((found, found + len(e)))
                found = string.find(e, found + 1)
        pos = 0
        literals = []
        for start, end in Sequence._merge_chunks(chunks):
            if pos < start:
                literals.append(Literal(characters[pos : start],
                  case_flags=IGNORECASE))
            literals.append(Literal(characters[start : end],
              case_flags=FULLIGNORECASE))
            pos = end
        if pos < len(characters):
            literals.append(Literal(characters[pos : ], case_flags=IGNORECASE))
        return literals
    @staticmethod
    def _merge_chunks(chunks):
        # Merge overlapping (start, end) intervals into a sorted,
        # non-overlapping list.
        if len(chunks) < 2:
            return chunks
        chunks.sort()
        start, end = chunks[0]
        new_chunks = []
        for s, e in chunks[1 : ]:
            if s <= end:
                end = max(end, e)
            else:
                new_chunks.append((start, end))
                start, end = s, e
        new_chunks.append((start, end))
        return new_chunks
    def is_empty(self):
        return all(i.is_empty() for i in self.items)
    def __eq__(self, other):
        return type(self) is type(other) and self.items == other.items
    def max_width(self):
        return sum(s.max_width() for s in self.items)
    def get_required_string(self, reverse):
        seq = self.items
        if reverse:
            seq = seq[::-1]
        offset = 0
        for s in seq:
            ofs, req = s.get_required_string(reverse)
            offset += ofs
            if req:
                return offset, req
        return offset, None
class SetBase(RegexBase):
    "Base class for the set operations: difference, intersection, etc."
    def __init__(self, info, items, positive=True, case_flags=NOCASE,
      zerowidth=False):
        RegexBase.__init__(self)
        self.info = info
        self.items = tuple(items)
        self.positive = bool(positive)
        self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
        self.zerowidth = bool(zerowidth)
        self.char_width = 1
        self._key = (self.__class__, self.items, self.positive,
          self.case_flags, self.zerowidth)
    def rebuild(self, positive, case_flags, zerowidth):
        # Rebuild this set with different flags and re-optimise it.
        return type(self)(self.info, self.items, positive, case_flags,
          zerowidth).optimise(self.info, False)
    def get_firstset(self, reverse):
        # A set is its own firstset.
        return set([self])
    def has_simple_start(self):
        return True
    def _compile(self, reverse, fuzzy):
        # Emit the set opcode, then the member opcodes, terminated by
        # OP.END.
        flags = 0
        if self.positive:
            flags |= POSITIVE_OP
        if self.zerowidth:
            flags |= ZEROWIDTH_OP
        if fuzzy:
            flags |= FUZZY_OP
        code = [(self._opcode[self.case_flags, reverse], flags)]
        for m in self.items:
            code.extend(m.compile())
        code.append((OP.END, ))
        return code
    def dump(self, indent, reverse):
        print("{}{} {}{}".format(INDENT * indent, self._op_name,
          POS_TEXT[self.positive], CASE_TEXT[self.case_flags]))
        for i in self.items:
            i.dump(indent + 1, reverse)
    def _handle_case_folding(self, info, in_set):
        # When the set is positive, case-insensitive with full
        # case-folding, and not nested in another set, turn it into a
        # branch of the set itself plus the fully-folded strings of any
        # expanding characters it matches; otherwise return self.
        # Is the set case-sensitive?
        if not self.positive or not (self.case_flags & IGNORECASE) or in_set:
            return self
        # Is full case-folding possible?
        if (not (self.info.flags & UNICODE) or (self.case_flags &
          FULLIGNORECASE) != FULLIGNORECASE):
            return self
        # Get the characters which expand to multiple codepoints on folding.
        expanding_chars = _regex.get_expand_on_folding()
        # Get the folded characters in the set.
        items = []
        seen = set()
        for ch in expanding_chars:
            if self.matches(ord(ch)):
                folded = _regex.fold_case(FULL_CASE_FOLDING, ch)
                if folded not in seen:
                    items.append(String([ord(c) for c in folded],
                      case_flags=self.case_flags))
                    seen.add(folded)
        if not items:
            # We can fall back to simple case-folding.
            return self
        return Branch([self] + items)
    def max_width(self):
        # Usually 1 character, but full case-folding can make a set match
        # a multi-codepoint folded string.
        # Is the set case-sensitive?
        if not self.positive or not (self.case_flags & IGNORECASE):
            return 1
        # Is full case-folding possible?
        if (not (self.info.flags & UNICODE) or (self.case_flags &
          FULLIGNORECASE) != FULLIGNORECASE):
            return 1
        # Get the characters which expand to multiple codepoints on folding.
        expanding_chars = _regex.get_expand_on_folding()
        # Get the folded characters in the set.
        seen = set()
        for ch in expanding_chars:
            if self.matches(ord(ch)):
                folded = _regex.fold_case(FULL_CASE_FOLDING, ch)
                seen.add(folded)
        if not seen:
            return 1
        return max(len(folded) for folded in seen)
    def __del__(self):
        # Break the reference cycle with the Info object.
        self.info = None
  2948. class SetDiff(SetBase):
  2949. _opcode = {(NOCASE, False): OP.SET_DIFF, (IGNORECASE, False):
  2950. OP.SET_DIFF_IGN, (FULLCASE, False): OP.SET_DIFF, (FULLIGNORECASE, False):
  2951. OP.SET_DIFF_IGN, (NOCASE, True): OP.SET_DIFF_REV, (IGNORECASE, True):
  2952. OP.SET_DIFF_IGN_REV, (FULLCASE, True): OP.SET_DIFF_REV, (FULLIGNORECASE,
  2953. True): OP.SET_DIFF_IGN_REV}
  2954. _op_name = "SET_DIFF"
  2955. def optimise(self, info, reverse, in_set=False):
  2956. items = self.items
  2957. if len(items) > 2:
  2958. items = [items[0], SetUnion(info, items[1 : ])]
  2959. if len(items) == 1:
  2960. return items[0].with_flags(case_flags=self.case_flags,
  2961. zerowidth=self.zerowidth).optimise(info, reverse, in_set)
  2962. self.items = tuple(m.optimise(info, reverse, in_set=True) for m in
  2963. items)
  2964. return self._handle_case_folding(info, in_set)
  2965. def matches(self, ch):
  2966. m = self.items[0].matches(ch) and not self.items[1].matches(ch)
  2967. return m == self.positive
  2968. class SetInter(SetBase):
  2969. _opcode = {(NOCASE, False): OP.SET_INTER, (IGNORECASE, False):
  2970. OP.SET_INTER_IGN, (FULLCASE, False): OP.SET_INTER, (FULLIGNORECASE,
  2971. False): OP.SET_INTER_IGN, (NOCASE, True): OP.SET_INTER_REV, (IGNORECASE,
  2972. True): OP.SET_INTER_IGN_REV, (FULLCASE, True): OP.SET_INTER_REV,
  2973. (FULLIGNORECASE, True): OP.SET_INTER_IGN_REV}
  2974. _op_name = "SET_INTER"
  2975. def optimise(self, info, reverse, in_set=False):
  2976. items = []
  2977. for m in self.items:
  2978. m = m.optimise(info, reverse, in_set=True)
  2979. if isinstance(m, SetInter) and m.positive:
  2980. # Intersection in intersection.
  2981. items.extend(m.items)
  2982. else:
  2983. items.append(m)
  2984. if len(items) == 1:
  2985. return items[0].with_flags(case_flags=self.case_flags,
  2986. zerowidth=self.zerowidth).optimise(info, reverse, in_set)
  2987. self.items = tuple(items)
  2988. return self._handle_case_folding(info, in_set)
  2989. def matches(self, ch):
  2990. m = all(i.matches(ch) for i in self.items)
  2991. return m == self.positive
  2992. class SetSymDiff(SetBase):
  2993. _opcode = {(NOCASE, False): OP.SET_SYM_DIFF, (IGNORECASE, False):
  2994. OP.SET_SYM_DIFF_IGN, (FULLCASE, False): OP.SET_SYM_DIFF, (FULLIGNORECASE,
  2995. False): OP.SET_SYM_DIFF_IGN, (NOCASE, True): OP.SET_SYM_DIFF_REV,
  2996. (IGNORECASE, True): OP.SET_SYM_DIFF_IGN_REV, (FULLCASE, True):
  2997. OP.SET_SYM_DIFF_REV, (FULLIGNORECASE, True): OP.SET_SYM_DIFF_IGN_REV}
  2998. _op_name = "SET_SYM_DIFF"
  2999. def optimise(self, info, reverse, in_set=False):
  3000. items = []
  3001. for m in self.items:
  3002. m = m.optimise(info, reverse, in_set=True)
  3003. if isinstance(m, SetSymDiff) and m.positive:
  3004. # Symmetric difference in symmetric difference.
  3005. items.extend(m.items)
  3006. else:
  3007. items.append(m)
  3008. if len(items) == 1:
  3009. return items[0].with_flags(case_flags=self.case_flags,
  3010. zerowidth=self.zerowidth).optimise(info, reverse, in_set)
  3011. self.items = tuple(items)
  3012. return self._handle_case_folding(info, in_set)
  3013. def matches(self, ch):
  3014. m = False
  3015. for i in self.items:
  3016. m = m != i.matches(ch)
  3017. return m == self.positive
class SetUnion(SetBase):
    "A set union."
    _opcode = {(NOCASE, False): OP.SET_UNION, (IGNORECASE, False):
      OP.SET_UNION_IGN, (FULLCASE, False): OP.SET_UNION, (FULLIGNORECASE,
      False): OP.SET_UNION_IGN, (NOCASE, True): OP.SET_UNION_REV, (IGNORECASE,
      True): OP.SET_UNION_IGN_REV, (FULLCASE, True): OP.SET_UNION_REV,
      (FULLIGNORECASE, True): OP.SET_UNION_IGN_REV}
    _op_name = "SET_UNION"
    def optimise(self, info, reverse, in_set=False):
        # Flatten nested positive unions, then simplify a 1-item union to
        # the item itself.
        items = []
        for m in self.items:
            m = m.optimise(info, reverse, in_set=True)
            if isinstance(m, SetUnion) and m.positive:
                # Union in union.
                items.extend(m.items)
            else:
                items.append(m)
        if len(items) == 1:
            # A union of one item is that item, folding together the
            # positive/negative senses of the union and the item.
            i = items[0]
            return i.with_flags(positive=i.positive == self.positive,
              case_flags=self.case_flags,
              zerowidth=self.zerowidth).optimise(info, reverse, in_set)
        self.items = tuple(items)
        return self._handle_case_folding(info, in_set)
    def _compile(self, reverse, fuzzy):
        flags = 0
        if self.positive:
            flags |= POSITIVE_OP
        if self.zerowidth:
            flags |= ZEROWIDTH_OP
        if fuzzy:
            flags |= FUZZY_OP
        # Gather the member Characters (keyed by their positive flag) so
        # they can be emitted as compact CHARACTER/STRING operands; other
        # members compile individually.
        characters, others = defaultdict(list), []
        for m in self.items:
            if isinstance(m, Character):
                characters[m.positive].append(m.value)
            else:
                others.append(m)
        code = [(self._opcode[self.case_flags, reverse], flags)]
        # Note: 'flags' is reused below for the per-operand flags.
        for positive, values in characters.items():
            flags = 0
            if positive:
                flags |= POSITIVE_OP
            if len(values) == 1:
                code.append((OP.CHARACTER, flags, values[0]))
            else:
                code.append((OP.STRING, flags, len(values)) + tuple(values))
        for m in others:
            code.extend(m.compile())
        code.append((OP.END, ))
        return code
    def matches(self, ch):
        m = any(i.matches(ch) for i in self.items)
        return m == self.positive
class Skip(ZeroWidthBase):
    "The (*SKIP) regex control verb."
    _op_name = "SKIP"
    _opcode = OP.SKIP
class StartOfLine(ZeroWidthBase):
    "Zero-width assertion: the start of a line."
    _opcode = OP.START_OF_LINE
    _op_name = "START_OF_LINE"
class StartOfLineU(StartOfLine):
    "Start-of-line assertion using the START_OF_LINE_U opcode."
    # NOTE(review): the _U suffix presumably selects Unicode line-ending
    # semantics in the engine — confirm against the opcode implementation.
    _opcode = OP.START_OF_LINE_U
    _op_name = "START_OF_LINE_U"
class StartOfString(ZeroWidthBase):
    "Zero-width assertion: the start of the string (\\A)."
    _opcode = OP.START_OF_STRING
    _op_name = "START_OF_STRING"
class StartOfWord(ZeroWidthBase):
    "Zero-width assertion: the start of a word (\\m)."
    _opcode = OP.START_OF_WORD
    _op_name = "START_OF_WORD"
  3086. class String(RegexBase):
  3087. _opcode = {(NOCASE, False): OP.STRING, (IGNORECASE, False): OP.STRING_IGN,
  3088. (FULLCASE, False): OP.STRING, (FULLIGNORECASE, False): OP.STRING_FLD,
  3089. (NOCASE, True): OP.STRING_REV, (IGNORECASE, True): OP.STRING_IGN_REV,
  3090. (FULLCASE, True): OP.STRING_REV, (FULLIGNORECASE, True):
  3091. OP.STRING_FLD_REV}
  3092. def __init__(self, characters, case_flags=NOCASE):
  3093. self.characters = tuple(characters)
  3094. self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
  3095. if (self.case_flags & FULLIGNORECASE) == FULLIGNORECASE:
  3096. folded_characters = []
  3097. for char in self.characters:
  3098. folded = _regex.fold_case(FULL_CASE_FOLDING, chr(char))
  3099. folded_characters.extend(ord(c) for c in folded)
  3100. else:
  3101. folded_characters = self.characters
  3102. self.folded_characters = tuple(folded_characters)
  3103. self.required = False
  3104. self._key = self.__class__, self.characters, self.case_flags
  3105. def get_firstset(self, reverse):
  3106. if reverse:
  3107. pos = -1
  3108. else:
  3109. pos = 0
  3110. return set([Character(self.characters[pos],
  3111. case_flags=self.case_flags)])
  3112. def has_simple_start(self):
  3113. return True
  3114. def _compile(self, reverse, fuzzy):
  3115. flags = 0
  3116. if fuzzy:
  3117. flags |= FUZZY_OP
  3118. if self.required:
  3119. flags |= REQUIRED_OP
  3120. return [(self._opcode[self.case_flags, reverse], flags,
  3121. len(self.folded_characters)) + self.folded_characters]
  3122. def dump(self, indent, reverse):
  3123. display = ascii("".join(chr(c) for c in self.characters)).lstrip("bu")
  3124. print("{}STRING {}{}".format(INDENT * indent, display,
  3125. CASE_TEXT[self.case_flags]))
  3126. def max_width(self):
  3127. return len(self.folded_characters)
  3128. def get_required_string(self, reverse):
  3129. return 0, self
  3130. class Literal(String):
  3131. def dump(self, indent, reverse):
  3132. literal = ''.join(chr(c) for c in self.characters)
  3133. display = ascii(literal).lstrip("bu")
  3134. print("{}LITERAL MATCH {}{}".format(INDENT * indent, display,
  3135. CASE_TEXT[self.case_flags]))
  3136. class StringSet(Branch):
  3137. def __init__(self, info, name, case_flags=NOCASE):
  3138. self.info = info
  3139. self.name = name
  3140. self.case_flags = CASE_FLAGS_COMBINATIONS[case_flags]
  3141. self._key = self.__class__, self.name, self.case_flags
  3142. self.set_key = (name, self.case_flags)
  3143. if self.set_key not in info.named_lists_used:
  3144. info.named_lists_used[self.set_key] = len(info.named_lists_used)
  3145. index = self.info.named_lists_used[self.set_key]
  3146. items = self.info.kwargs[self.name]
  3147. case_flags = self.case_flags
  3148. encoding = self.info.flags & _ALL_ENCODINGS
  3149. fold_flags = encoding | case_flags
  3150. choices = []
  3151. for string in items:
  3152. if isinstance(string, str):
  3153. string = [ord(c) for c in string]
  3154. choices.append([Character(c, case_flags=case_flags) for c in
  3155. string])
  3156. # Sort from longest to shortest.
  3157. choices.sort(key=len, reverse=True)
  3158. self.branches = [Sequence(choice) for choice in choices]
  3159. def dump(self, indent, reverse):
  3160. print("{}STRING_SET {}{}".format(INDENT * indent, self.name,
  3161. CASE_TEXT[self.case_flags]))
  3162. def __del__(self):
  3163. self.info = None
class Source:
    "Scanner for the regular expression source string."
    def __init__(self, string):
        # Accept str or a bytes-like pattern; bytes are decoded via
        # Latin-1 so each byte maps to exactly one character.
        if isinstance(string, str):
            self.string = string
            self.char_type = chr
        else:
            self.string = string.decode("latin-1")
            self.char_type = lambda c: bytes([c])
        self.pos = 0
        # When True (VERBOSE patterns), whitespace and "#" comments are
        # skipped by the accessor methods.
        self.ignore_space = False
        # An empty slice of the original, keeping its type (str or bytes).
        self.sep = string[ : 0]
    def get(self, override_ignore=False):
        # Return the next character and advance past it; return an empty
        # string at the end of the pattern.
        string = self.string
        pos = self.pos
        try:
            if self.ignore_space and not override_ignore:
                while True:
                    if string[pos].isspace():
                        # Skip over the whitespace.
                        pos += 1
                    elif string[pos] == "#":
                        # Skip over the comment to the end of the line.
                        pos = string.index("\n", pos)
                    else:
                        break
            ch = string[pos]
            self.pos = pos + 1
            return ch
        except IndexError:
            # We've reached the end of the string.
            self.pos = pos
            return string[ : 0]
        except ValueError:
            # The comment extended to the end of the string.
            self.pos = len(string)
            return string[ : 0]
    def get_many(self, count=1):
        # Return up to 'count' characters, advancing past them; may return
        # fewer at the end of the pattern.
        string = self.string
        pos = self.pos
        try:
            if self.ignore_space:
                substring = []
                while len(substring) < count:
                    while True:
                        if string[pos].isspace():
                            # Skip over the whitespace.
                            pos += 1
                        elif string[pos] == "#":
                            # Skip over the comment to the end of the line.
                            pos = string.index("\n", pos)
                        else:
                            break
                    substring.append(string[pos])
                    pos += 1
                substring = "".join(substring)
            else:
                substring = string[pos : pos + count]
                pos += len(substring)
            self.pos = pos
            return substring
        except IndexError:
            # We've reached the end of the string; return what was
            # collected so far.
            self.pos = len(string)
            return "".join(substring)
        except ValueError:
            # The comment extended to the end of the string.
            self.pos = len(string)
            return "".join(substring)
    def get_while(self, test_set, include=True, keep_spaces=False):
        # Return the run of characters whose membership of 'test_set'
        # equals 'include', advancing past them.
        string = self.string
        pos = self.pos
        if self.ignore_space and not keep_spaces:
            try:
                substring = []
                while True:
                    if string[pos].isspace():
                        # Skip over the whitespace.
                        pos += 1
                    elif string[pos] == "#":
                        # Skip over the comment to the end of the line.
                        pos = string.index("\n", pos)
                    elif (string[pos] in test_set) == include:
                        substring.append(string[pos])
                        pos += 1
                    else:
                        break
                self.pos = pos
            except IndexError:
                # We've reached the end of the string.
                self.pos = len(string)
            except ValueError:
                # The comment extended to the end of the string.
                self.pos = len(string)
            return "".join(substring)
        else:
            try:
                while (string[pos] in test_set) == include:
                    pos += 1
                substring = string[self.pos : pos]
                self.pos = pos
                return substring
            except IndexError:
                # We've reached the end of the string.
                substring = string[self.pos : pos]
                self.pos = pos
                return substring
    def skip_while(self, test_set, include=True):
        # Like get_while, but discards the characters instead of
        # returning them.
        string = self.string
        pos = self.pos
        try:
            if self.ignore_space:
                while True:
                    if string[pos].isspace():
                        # Skip over the whitespace.
                        pos += 1
                    elif string[pos] == "#":
                        # Skip over the comment to the end of the line.
                        pos = string.index("\n", pos)
                    elif (string[pos] in test_set) == include:
                        pos += 1
                    else:
                        break
            else:
                while (string[pos] in test_set) == include:
                    pos += 1
            self.pos = pos
        except IndexError:
            # We've reached the end of the string.
            self.pos = len(string)
        except ValueError:
            # The comment extended to the end of the string.
            self.pos = len(string)
    def match(self, substring):
        # If the source continues with 'substring', advance past it and
        # return True; otherwise leave the position alone and return
        # False.
        string = self.string
        pos = self.pos
        if self.ignore_space:
            try:
                # Match character by character, skipping whitespace and
                # comments before each one.
                for c in substring:
                    while True:
                        if string[pos].isspace():
                            # Skip over the whitespace.
                            pos += 1
                        elif string[pos] == "#":
                            # Skip over the comment to the end of the line.
                            pos = string.index("\n", pos)
                        else:
                            break
                    if string[pos] != c:
                        return False
                    pos += 1
                self.pos = pos
                return True
            except IndexError:
                # We've reached the end of the string.
                return False
            except ValueError:
                # The comment extended to the end of the string.
                return False
        else:
            if not string.startswith(substring, pos):
                return False
            self.pos = pos + len(substring)
            return True
    def expect(self, substring):
        # Require 'substring' at the current position; raise 'error' if
        # it's absent.
        if not self.match(substring):
            raise error("missing {}".format(substring), self.string, self.pos)
    def at_end(self):
        # Return True if nothing but (ignorable) whitespace/comments
        # remains.
        string = self.string
        pos = self.pos
        try:
            if self.ignore_space:
                while True:
                    if string[pos].isspace():
                        pos += 1
                    elif string[pos] == "#":
                        pos = string.index("\n", pos)
                    else:
                        break
            return pos >= len(string)
        except IndexError:
            # We've reached the end of the string.
            return True
        except ValueError:
            # The comment extended to the end of the string.
            return True
class Info:
    "Info about the regular expression."
    def __init__(self, flags=0, char_type=None, kwargs={}):
        # NOTE(review): mutable default 'kwargs' is shared across calls;
        # it appears to be only read here — confirm before changing.
        flags |= DEFAULT_FLAGS[(flags & _ALL_VERSIONS) or DEFAULT_VERSION]
        self.flags = flags
        self.global_flags = flags
        self.inline_locale = False
        self.kwargs = kwargs
        self.group_count = 0
        self.group_index = {}  # group name -> group number
        self.group_name = {}  # group number -> group name
        self.char_type = char_type
        self.named_lists_used = {}
        self.open_groups = []  # stack of currently-open group numbers
        self.open_group_count = {}
        self.defined_groups = {}
        self.group_calls = []
        self.private_groups = {}
    def open_group(self, name=None):
        # Return the number for a (possibly named) capture group, creating
        # it if necessary, and mark it as open.
        group = self.group_index.get(name)
        if group is None:
            # Skip over any numbers already taken by explicitly-named
            # groups.
            while True:
                self.group_count += 1
                if name is None or self.group_count not in self.group_name:
                    break
            group = self.group_count
            if name:
                self.group_index[name] = group
                self.group_name[group] = name
        if group in self.open_groups:
            # We have a nested named group. We'll assign it a private group
            # number, initially negative until we can assign a proper
            # (positive) number.
            group_alias = -(len(self.private_groups) + 1)
            self.private_groups[group_alias] = group
            group = group_alias
        self.open_groups.append(group)
        self.open_group_count[group] = self.open_group_count.get(group, 0) + 1
        return group
    def close_group(self):
        self.open_groups.pop()
    def is_open_group(self, name):
        # In version 1, a group reference can refer to an open group. We'll
        # just pretend the group isn't open.
        version = (self.flags & _ALL_VERSIONS) or DEFAULT_VERSION
        if version == VERSION1:
            return False
        if name.isdigit():
            group = int(name)
        else:
            group = self.group_index.get(name)
        return group in self.open_groups
def _check_group_features(info, parsed):
    """Checks whether the reverse and fuzzy features of the group calls match
    the groups which they call.

    Fills in info.call_refs (mapping each distinct (group, reverse, fuzzy)
    call to a reference index) and info.additional_groups (copies of groups
    needed with different reverse/fuzzy features).
    """
    call_refs = {}
    additional_groups = []
    for call, reverse, fuzzy in info.group_calls:
        # Look up the reference of this group call.
        key = (call.group, reverse, fuzzy)
        ref = call_refs.get(key)
        if ref is None:
            # This group doesn't have a reference yet, so look up its features.
            if call.group == 0:
                # Calling the pattern as a whole.
                rev = bool(info.flags & REVERSE)
                fuz = isinstance(parsed, Fuzzy)
                if (rev, fuz) != (reverse, fuzzy):
                    # The pattern as a whole doesn't have the features we want,
                    # so we'll need to make a copy of it with the desired
                    # features.
                    additional_groups.append((CallRef(len(call_refs), parsed),
                      reverse, fuzzy))
            else:
                # Calling a capture group.
                def_info = info.defined_groups[call.group]
                group = def_info[0]
                if def_info[1 : ] != (reverse, fuzzy):
                    # The group doesn't have the features we want, so we'll
                    # need to make a copy of it with the desired features.
                    additional_groups.append((group, reverse, fuzzy))
            ref = len(call_refs)
            call_refs[key] = ref
        call.call_ref = ref
    info.call_refs = call_refs
    info.additional_groups = additional_groups
  3437. def _get_required_string(parsed, flags):
  3438. "Gets the required string and related info of a parsed pattern."
  3439. req_offset, required = parsed.get_required_string(bool(flags & REVERSE))
  3440. if required:
  3441. required.required = True
  3442. if req_offset >= UNLIMITED:
  3443. req_offset = -1
  3444. req_flags = required.case_flags
  3445. if not (flags & UNICODE):
  3446. req_flags &= ~UNICODE
  3447. req_chars = required.folded_characters
  3448. else:
  3449. req_offset = 0
  3450. req_chars = ()
  3451. req_flags = 0
  3452. return req_offset, req_chars, req_flags
class Scanner:
    "A lexical scanner built from a lexicon of (phrase, action) pairs."
    def __init__(self, lexicon, flags=0):
        self.lexicon = lexicon
        # Combine phrases into a compound pattern.
        patterns = []
        for phrase, action in lexicon:
            # Parse the regular expression.
            source = Source(phrase)
            info = Info(flags, source.char_type)
            source.ignore_space = bool(info.flags & VERBOSE)
            parsed = _parse_pattern(source, info)
            if not source.at_end():
                raise error("unbalanced parenthesis", source.string,
                  source.pos)
            # We want to forbid capture groups within each phrase.
            patterns.append(parsed.remove_captures())
        # Combine all the subpatterns into one pattern; group g+1
        # identifies the phrase that matched (see scan()).
        info = Info(flags)
        patterns = [Group(info, g + 1, p) for g, p in enumerate(patterns)]
        parsed = Branch(patterns)
        # Optimise the compound pattern.
        reverse = bool(info.flags & REVERSE)
        parsed = parsed.optimise(info, reverse)
        parsed = parsed.pack_characters(info)
        # Get the required string.
        req_offset, req_chars, req_flags = _get_required_string(parsed,
          info.flags)
        # Check the features of the groups.
        _check_group_features(info, parsed)
        # Complain if there are any group calls. They are not supported by the
        # Scanner class.
        if info.call_refs:
            raise error("recursive regex not supported by Scanner",
              source.string, source.pos)
        reverse = bool(info.flags & REVERSE)
        # Compile the compound pattern. The result is a list of tuples.
        code = parsed.compile(reverse) + [(OP.SUCCESS, )]
        # Flatten the code into a list of ints.
        code = _flatten_code(code)
        if not parsed.has_simple_start():
            # Get the first set, if possible.
            try:
                fs_code = _compile_firstset(info, parsed.get_firstset(reverse))
                fs_code = _flatten_code(fs_code)
                code = fs_code + code
            except _FirstSetError:
                pass
        # Check the global flags for conflicts.
        version = (info.flags & _ALL_VERSIONS) or DEFAULT_VERSION
        if version not in (0, VERSION0, VERSION1):
            raise ValueError("VERSION0 and VERSION1 flags are mutually incompatible")
        # Create the PatternObject.
        #
        # Local flags like IGNORECASE affect the code generation, but aren't
        # needed by the PatternObject itself. Conversely, global flags like
        # LOCALE _don't_ affect the code generation but _are_ needed by the
        # PatternObject.
        self.scanner = _regex.compile(None, (flags & GLOBAL_FLAGS) | version,
          code, {}, {}, {}, [], req_offset, req_chars, req_flags,
          len(patterns))
    def scan(self, string):
        # Repeatedly match at the current position, collecting the result
        # of each matched phrase's action; stop at the first failure or
        # zero-width match. Returns (results, unmatched remainder).
        result = []
        append = result.append
        match = self.scanner.scanner(string).match
        i = 0
        while True:
            m = match()
            if not m:
                break
            j = m.end()
            if i == j:
                # A zero-width match would loop forever; stop here.
                break
            # m.lastindex identifies which phrase's group matched.
            action = self.lexicon[m.lastindex - 1][1]
            if hasattr(action, '__call__'):
                self.match = m
                action = action(self, m.group())
            if action is not None:
                append(action)
            i = j
        return result, string[i : ]
# Get the known properties dict.
PROPERTIES = _regex.get_properties()

# Build the inverse of the properties dict: map each property id to its
# longest (most descriptive) name and a dict mapping each value id to its
# longest name.
PROPERTY_NAMES = {}
for prop_name, (prop_id, values) in PROPERTIES.items():
    name, prop_values = PROPERTY_NAMES.get(prop_id, ("", {}))
    # Prefer the longest of the aliases for this property.
    name = max(name, prop_name, key=len)
    PROPERTY_NAMES[prop_id] = name, prop_values
    for val_name, val_id in values.items():
        prop_values[val_id] = max(prop_values.get(val_id, ""), val_name,
          key=len)
# Character escape sequences.
CHARACTER_ESCAPES = {
    "a": "\a",
    "b": "\b",
    "f": "\f",
    "n": "\n",
    "r": "\r",
    "t": "\t",
    "v": "\v",
}

# Predefined character set escape sequences, each mapped to a property
# lookup (positive or negative).
CHARSET_ESCAPES = {
    "d": lookup_property(None, "Digit", True),
    "D": lookup_property(None, "Digit", False),
    "h": lookup_property(None, "Blank", True),
    "s": lookup_property(None, "Space", True),
    "S": lookup_property(None, "Space", False),
    "w": lookup_property(None, "Word", True),
    "W": lookup_property(None, "Word", False),
}

# Positional escape sequences.
POSITION_ESCAPES = {
    "A": StartOfString(),
    "b": Boundary(),
    "B": Boundary(False),
    "K": Keep(),
    "m": StartOfWord(),
    "M": EndOfWord(),
    "Z": EndOfString(),
}

# Positional escape sequences when WORD flag set: \b, \B, \m and \M use
# the default word boundary nodes instead.
WORD_POSITION_ESCAPES = dict(POSITION_ESCAPES)
WORD_POSITION_ESCAPES.update({
    "b": DefaultBoundary(),
    "B": DefaultBoundary(False),
    "m": DefaultStartOfWord(),
    "M": DefaultEndOfWord(),
})
  3582. # Regex control verbs.
  3583. VERBS = {
  3584. "FAIL": Failure(),
  3585. "F": Failure(),
  3586. "PRUNE": Prune(),
  3587. "SKIP": Skip(),
  3588. }