Add files using upload-large-folder tool
Browse files- .gitattributes +14 -0
- 17.Clips4Sale.BareBackStudios.Far Cry Daughter.mp4 +3 -0
- 17.Mofos.College Rivals Bond Over Cock.mp4 +3 -0
- 17.Swallowed.Naughty Nymphos With Holly Hendrix And Kenzie Reeves.mp4 +3 -0
- 17.TeenFidelity.Not For Sale.ep281.mp4 +3 -0
- 18.ALSScan.No Clean Up.mp4 +3 -0
- 18.GirlsWay.Already In Trouble.mp4 +3 -0
- 19.BrattySis.Nubiles-Porn.Sharing My Step Sisters Friend.mp4 +3 -0
- screens/17.Clips4Sale.Superheroine Destruction.Harley Quinn - Mind Controlled & Violated.mp4.jpg +3 -0
- screens/17.DigitalPlayground.Slippery Salesgirl.mp4.jpg +3 -0
- screens/18.ALSScan.No Clean Up.mp4.jpg +3 -0
- screens/18.DigitalPlayground.Locker Room Lust.mp4.jpg +3 -0
- screens/18.Mofos.Backyard Camping for Hottie on House Arrest.mp4.jpg +3 -0
- screens/18.TushyRaw.Out Of Town.mp4.jpg +3 -0
- screens/19.ArchangelVideo.My Pussy Is Dripping.mp4.jpg +3 -0
- venv/lib/python3.12/site-packages/yaml/cyaml.py +101 -0
- venv/lib/python3.12/site-packages/yaml/error.py +75 -0
- venv/lib/python3.12/site-packages/yaml/loader.py +63 -0
- venv/lib/python3.12/site-packages/yaml/parser.py +589 -0
- venv/lib/python3.12/site-packages/yaml/resolver.py +227 -0
- venv/lib/python3.12/site-packages/yaml/serializer.py +111 -0
.gitattributes
CHANGED
@@ -321,3 +321,17 @@ screens/18.GirlsWay.Peer[[:space:]]Pressure[[:space:]]-[[:space:]]The[[:space:]]
|
|
321 |
17.Clips4Sale.Family[[:space:]]Therapy.Sister[[:space:]]Auditions[[:space:]]for[[:space:]]Brother.mp4 filter=lfs diff=lfs merge=lfs -text
|
322 |
18.Tushy.My[[:space:]]Third[[:space:]]Anal[[:space:]]Confession.100736_JULLIAN[[:space:]]JANSON.mp4 filter=lfs diff=lfs merge=lfs -text
|
323 |
19.EvilAngel.Squirting[[:space:]]Kenzie[[:space:]]-[[:space:]]Anal[[:space:]]Gaping[[:space:]]&[[:space:]]Facial.mp4 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
321 |
17.Clips4Sale.Family[[:space:]]Therapy.Sister[[:space:]]Auditions[[:space:]]for[[:space:]]Brother.mp4 filter=lfs diff=lfs merge=lfs -text
|
322 |
18.Tushy.My[[:space:]]Third[[:space:]]Anal[[:space:]]Confession.100736_JULLIAN[[:space:]]JANSON.mp4 filter=lfs diff=lfs merge=lfs -text
|
323 |
19.EvilAngel.Squirting[[:space:]]Kenzie[[:space:]]-[[:space:]]Anal[[:space:]]Gaping[[:space:]]&[[:space:]]Facial.mp4 filter=lfs diff=lfs merge=lfs -text
|
324 |
+
screens/18.TushyRaw.Out[[:space:]]Of[[:space:]]Town.mp4.jpg filter=lfs diff=lfs merge=lfs -text
|
325 |
+
screens/17.Clips4Sale.Superheroine[[:space:]]Destruction.Harley[[:space:]]Quinn[[:space:]]-[[:space:]]Mind[[:space:]]Controlled[[:space:]]&[[:space:]]Violated.mp4.jpg filter=lfs diff=lfs merge=lfs -text
|
326 |
+
screens/18.ALSScan.No[[:space:]]Clean[[:space:]]Up.mp4.jpg filter=lfs diff=lfs merge=lfs -text
|
327 |
+
screens/17.DigitalPlayground.Slippery[[:space:]]Salesgirl.mp4.jpg filter=lfs diff=lfs merge=lfs -text
|
328 |
+
screens/18.Mofos.Backyard[[:space:]]Camping[[:space:]]for[[:space:]]Hottie[[:space:]]on[[:space:]]House[[:space:]]Arrest.mp4.jpg filter=lfs diff=lfs merge=lfs -text
|
329 |
+
screens/18.DigitalPlayground.Locker[[:space:]]Room[[:space:]]Lust.mp4.jpg filter=lfs diff=lfs merge=lfs -text
|
330 |
+
screens/19.ArchangelVideo.My[[:space:]]Pussy[[:space:]]Is[[:space:]]Dripping.mp4.jpg filter=lfs diff=lfs merge=lfs -text
|
331 |
+
17.Mofos.College[[:space:]]Rivals[[:space:]]Bond[[:space:]]Over[[:space:]]Cock.mp4 filter=lfs diff=lfs merge=lfs -text
|
332 |
+
17.Swallowed.Naughty[[:space:]]Nymphos[[:space:]]With[[:space:]]Holly[[:space:]]Hendrix[[:space:]]And[[:space:]]Kenzie[[:space:]]Reeves.mp4 filter=lfs diff=lfs merge=lfs -text
|
333 |
+
17.Clips4Sale.BareBackStudios.Far[[:space:]]Cry[[:space:]]Daughter.mp4 filter=lfs diff=lfs merge=lfs -text
|
334 |
+
18.GirlsWay.Already[[:space:]]In[[:space:]]Trouble.mp4 filter=lfs diff=lfs merge=lfs -text
|
335 |
+
17.TeenFidelity.Not[[:space:]]For[[:space:]]Sale.ep281.mp4 filter=lfs diff=lfs merge=lfs -text
|
336 |
+
19.BrattySis.Nubiles-Porn.Sharing[[:space:]]My[[:space:]]Step[[:space:]]Sisters[[:space:]]Friend.mp4 filter=lfs diff=lfs merge=lfs -text
|
337 |
+
18.ALSScan.No[[:space:]]Clean[[:space:]]Up.mp4 filter=lfs diff=lfs merge=lfs -text
|
17.Clips4Sale.BareBackStudios.Far Cry Daughter.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:c4d2abf143ea5725ca34e5f84e2a29a28c0bf4dd54458e63fc4fde76a1302f60
|
3 |
+
size 2961386131
|
17.Mofos.College Rivals Bond Over Cock.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:aa2baccb57a3c3d8842ecd703089f1daddf581c932054af21264d6e4bd0947a0
|
3 |
+
size 2985051266
|
17.Swallowed.Naughty Nymphos With Holly Hendrix And Kenzie Reeves.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:ce7c10a7b46a06ee1cc6750a4732c9e29ae31e4840fa5a2c63e6801d0f646f3f
|
3 |
+
size 2827951439
|
17.TeenFidelity.Not For Sale.ep281.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:5ee325fce2fb9f48075eea637c4f14ebb3a046d47ebd2bfacfeae0032b1e90b4
|
3 |
+
size 4485672337
|
18.ALSScan.No Clean Up.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:28f4cd0267dcb2c08f428e92672accf0b7101da5d93707b5b11e996e0e586bb3
|
3 |
+
size 2785669246
|
18.GirlsWay.Already In Trouble.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:276a7ed77462602e3a7c0080041bc8392e04bd377ec2bdaf8ebe1f13b14a0c7e
|
3 |
+
size 2721775160
|
19.BrattySis.Nubiles-Porn.Sharing My Step Sisters Friend.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:2d46e2f6c3d57b27cf1e8a4b13b6b350bdd00758b30e2860bf90bf51904b4303
|
3 |
+
size 4975682273
|
screens/17.Clips4Sale.Superheroine Destruction.Harley Quinn - Mind Controlled & Violated.mp4.jpg
ADDED
![]() |
Git LFS Details
|
screens/17.DigitalPlayground.Slippery Salesgirl.mp4.jpg
ADDED
![]() |
Git LFS Details
|
screens/18.ALSScan.No Clean Up.mp4.jpg
ADDED
![]() |
Git LFS Details
|
screens/18.DigitalPlayground.Locker Room Lust.mp4.jpg
ADDED
![]() |
Git LFS Details
|
screens/18.Mofos.Backyard Camping for Hottie on House Arrest.mp4.jpg
ADDED
![]() |
Git LFS Details
|
screens/18.TushyRaw.Out Of Town.mp4.jpg
ADDED
![]() |
Git LFS Details
|
screens/19.ArchangelVideo.My Pussy Is Dripping.mp4.jpg
ADDED
![]() |
Git LFS Details
|
venv/lib/python3.12/site-packages/yaml/cyaml.py
ADDED
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
__all__ = [
|
3 |
+
'CBaseLoader', 'CSafeLoader', 'CFullLoader', 'CUnsafeLoader', 'CLoader',
|
4 |
+
'CBaseDumper', 'CSafeDumper', 'CDumper'
|
5 |
+
]
|
6 |
+
|
7 |
+
from yaml._yaml import CParser, CEmitter
|
8 |
+
|
9 |
+
from .constructor import *
|
10 |
+
|
11 |
+
from .serializer import *
|
12 |
+
from .representer import *
|
13 |
+
|
14 |
+
from .resolver import *
|
15 |
+
|
16 |
+
class CBaseLoader(CParser, BaseConstructor, BaseResolver):
|
17 |
+
|
18 |
+
def __init__(self, stream):
|
19 |
+
CParser.__init__(self, stream)
|
20 |
+
BaseConstructor.__init__(self)
|
21 |
+
BaseResolver.__init__(self)
|
22 |
+
|
23 |
+
class CSafeLoader(CParser, SafeConstructor, Resolver):
|
24 |
+
|
25 |
+
def __init__(self, stream):
|
26 |
+
CParser.__init__(self, stream)
|
27 |
+
SafeConstructor.__init__(self)
|
28 |
+
Resolver.__init__(self)
|
29 |
+
|
30 |
+
class CFullLoader(CParser, FullConstructor, Resolver):
|
31 |
+
|
32 |
+
def __init__(self, stream):
|
33 |
+
CParser.__init__(self, stream)
|
34 |
+
FullConstructor.__init__(self)
|
35 |
+
Resolver.__init__(self)
|
36 |
+
|
37 |
+
class CUnsafeLoader(CParser, UnsafeConstructor, Resolver):
|
38 |
+
|
39 |
+
def __init__(self, stream):
|
40 |
+
CParser.__init__(self, stream)
|
41 |
+
UnsafeConstructor.__init__(self)
|
42 |
+
Resolver.__init__(self)
|
43 |
+
|
44 |
+
class CLoader(CParser, Constructor, Resolver):
|
45 |
+
|
46 |
+
def __init__(self, stream):
|
47 |
+
CParser.__init__(self, stream)
|
48 |
+
Constructor.__init__(self)
|
49 |
+
Resolver.__init__(self)
|
50 |
+
|
51 |
+
class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
|
52 |
+
|
53 |
+
def __init__(self, stream,
|
54 |
+
default_style=None, default_flow_style=False,
|
55 |
+
canonical=None, indent=None, width=None,
|
56 |
+
allow_unicode=None, line_break=None,
|
57 |
+
encoding=None, explicit_start=None, explicit_end=None,
|
58 |
+
version=None, tags=None, sort_keys=True):
|
59 |
+
CEmitter.__init__(self, stream, canonical=canonical,
|
60 |
+
indent=indent, width=width, encoding=encoding,
|
61 |
+
allow_unicode=allow_unicode, line_break=line_break,
|
62 |
+
explicit_start=explicit_start, explicit_end=explicit_end,
|
63 |
+
version=version, tags=tags)
|
64 |
+
Representer.__init__(self, default_style=default_style,
|
65 |
+
default_flow_style=default_flow_style, sort_keys=sort_keys)
|
66 |
+
Resolver.__init__(self)
|
67 |
+
|
68 |
+
class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
|
69 |
+
|
70 |
+
def __init__(self, stream,
|
71 |
+
default_style=None, default_flow_style=False,
|
72 |
+
canonical=None, indent=None, width=None,
|
73 |
+
allow_unicode=None, line_break=None,
|
74 |
+
encoding=None, explicit_start=None, explicit_end=None,
|
75 |
+
version=None, tags=None, sort_keys=True):
|
76 |
+
CEmitter.__init__(self, stream, canonical=canonical,
|
77 |
+
indent=indent, width=width, encoding=encoding,
|
78 |
+
allow_unicode=allow_unicode, line_break=line_break,
|
79 |
+
explicit_start=explicit_start, explicit_end=explicit_end,
|
80 |
+
version=version, tags=tags)
|
81 |
+
SafeRepresenter.__init__(self, default_style=default_style,
|
82 |
+
default_flow_style=default_flow_style, sort_keys=sort_keys)
|
83 |
+
Resolver.__init__(self)
|
84 |
+
|
85 |
+
class CDumper(CEmitter, Serializer, Representer, Resolver):
|
86 |
+
|
87 |
+
def __init__(self, stream,
|
88 |
+
default_style=None, default_flow_style=False,
|
89 |
+
canonical=None, indent=None, width=None,
|
90 |
+
allow_unicode=None, line_break=None,
|
91 |
+
encoding=None, explicit_start=None, explicit_end=None,
|
92 |
+
version=None, tags=None, sort_keys=True):
|
93 |
+
CEmitter.__init__(self, stream, canonical=canonical,
|
94 |
+
indent=indent, width=width, encoding=encoding,
|
95 |
+
allow_unicode=allow_unicode, line_break=line_break,
|
96 |
+
explicit_start=explicit_start, explicit_end=explicit_end,
|
97 |
+
version=version, tags=tags)
|
98 |
+
Representer.__init__(self, default_style=default_style,
|
99 |
+
default_flow_style=default_flow_style, sort_keys=sort_keys)
|
100 |
+
Resolver.__init__(self)
|
101 |
+
|
venv/lib/python3.12/site-packages/yaml/error.py
ADDED
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
|
3 |
+
|
4 |
+
class Mark:
|
5 |
+
|
6 |
+
def __init__(self, name, index, line, column, buffer, pointer):
|
7 |
+
self.name = name
|
8 |
+
self.index = index
|
9 |
+
self.line = line
|
10 |
+
self.column = column
|
11 |
+
self.buffer = buffer
|
12 |
+
self.pointer = pointer
|
13 |
+
|
14 |
+
def get_snippet(self, indent=4, max_length=75):
|
15 |
+
if self.buffer is None:
|
16 |
+
return None
|
17 |
+
head = ''
|
18 |
+
start = self.pointer
|
19 |
+
while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029':
|
20 |
+
start -= 1
|
21 |
+
if self.pointer-start > max_length/2-1:
|
22 |
+
head = ' ... '
|
23 |
+
start += 5
|
24 |
+
break
|
25 |
+
tail = ''
|
26 |
+
end = self.pointer
|
27 |
+
while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029':
|
28 |
+
end += 1
|
29 |
+
if end-self.pointer > max_length/2-1:
|
30 |
+
tail = ' ... '
|
31 |
+
end -= 5
|
32 |
+
break
|
33 |
+
snippet = self.buffer[start:end]
|
34 |
+
return ' '*indent + head + snippet + tail + '\n' \
|
35 |
+
+ ' '*(indent+self.pointer-start+len(head)) + '^'
|
36 |
+
|
37 |
+
def __str__(self):
|
38 |
+
snippet = self.get_snippet()
|
39 |
+
where = " in \"%s\", line %d, column %d" \
|
40 |
+
% (self.name, self.line+1, self.column+1)
|
41 |
+
if snippet is not None:
|
42 |
+
where += ":\n"+snippet
|
43 |
+
return where
|
44 |
+
|
45 |
+
class YAMLError(Exception):
|
46 |
+
pass
|
47 |
+
|
48 |
+
class MarkedYAMLError(YAMLError):
|
49 |
+
|
50 |
+
def __init__(self, context=None, context_mark=None,
|
51 |
+
problem=None, problem_mark=None, note=None):
|
52 |
+
self.context = context
|
53 |
+
self.context_mark = context_mark
|
54 |
+
self.problem = problem
|
55 |
+
self.problem_mark = problem_mark
|
56 |
+
self.note = note
|
57 |
+
|
58 |
+
def __str__(self):
|
59 |
+
lines = []
|
60 |
+
if self.context is not None:
|
61 |
+
lines.append(self.context)
|
62 |
+
if self.context_mark is not None \
|
63 |
+
and (self.problem is None or self.problem_mark is None
|
64 |
+
or self.context_mark.name != self.problem_mark.name
|
65 |
+
or self.context_mark.line != self.problem_mark.line
|
66 |
+
or self.context_mark.column != self.problem_mark.column):
|
67 |
+
lines.append(str(self.context_mark))
|
68 |
+
if self.problem is not None:
|
69 |
+
lines.append(self.problem)
|
70 |
+
if self.problem_mark is not None:
|
71 |
+
lines.append(str(self.problem_mark))
|
72 |
+
if self.note is not None:
|
73 |
+
lines.append(self.note)
|
74 |
+
return '\n'.join(lines)
|
75 |
+
|
venv/lib/python3.12/site-packages/yaml/loader.py
ADDED
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
__all__ = ['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader']
|
3 |
+
|
4 |
+
from .reader import *
|
5 |
+
from .scanner import *
|
6 |
+
from .parser import *
|
7 |
+
from .composer import *
|
8 |
+
from .constructor import *
|
9 |
+
from .resolver import *
|
10 |
+
|
11 |
+
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
|
12 |
+
|
13 |
+
def __init__(self, stream):
|
14 |
+
Reader.__init__(self, stream)
|
15 |
+
Scanner.__init__(self)
|
16 |
+
Parser.__init__(self)
|
17 |
+
Composer.__init__(self)
|
18 |
+
BaseConstructor.__init__(self)
|
19 |
+
BaseResolver.__init__(self)
|
20 |
+
|
21 |
+
class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver):
|
22 |
+
|
23 |
+
def __init__(self, stream):
|
24 |
+
Reader.__init__(self, stream)
|
25 |
+
Scanner.__init__(self)
|
26 |
+
Parser.__init__(self)
|
27 |
+
Composer.__init__(self)
|
28 |
+
FullConstructor.__init__(self)
|
29 |
+
Resolver.__init__(self)
|
30 |
+
|
31 |
+
class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
|
32 |
+
|
33 |
+
def __init__(self, stream):
|
34 |
+
Reader.__init__(self, stream)
|
35 |
+
Scanner.__init__(self)
|
36 |
+
Parser.__init__(self)
|
37 |
+
Composer.__init__(self)
|
38 |
+
SafeConstructor.__init__(self)
|
39 |
+
Resolver.__init__(self)
|
40 |
+
|
41 |
+
class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
|
42 |
+
|
43 |
+
def __init__(self, stream):
|
44 |
+
Reader.__init__(self, stream)
|
45 |
+
Scanner.__init__(self)
|
46 |
+
Parser.__init__(self)
|
47 |
+
Composer.__init__(self)
|
48 |
+
Constructor.__init__(self)
|
49 |
+
Resolver.__init__(self)
|
50 |
+
|
51 |
+
# UnsafeLoader is the same as Loader (which is and was always unsafe on
|
52 |
+
# untrusted input). Use of either Loader or UnsafeLoader should be rare, since
|
53 |
+
# FullLoad should be able to load almost all YAML safely. Loader is left intact
|
54 |
+
# to ensure backwards compatibility.
|
55 |
+
class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
|
56 |
+
|
57 |
+
def __init__(self, stream):
|
58 |
+
Reader.__init__(self, stream)
|
59 |
+
Scanner.__init__(self)
|
60 |
+
Parser.__init__(self)
|
61 |
+
Composer.__init__(self)
|
62 |
+
Constructor.__init__(self)
|
63 |
+
Resolver.__init__(self)
|
venv/lib/python3.12/site-packages/yaml/parser.py
ADDED
@@ -0,0 +1,589 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
# The following YAML grammar is LL(1) and is parsed by a recursive descent
|
3 |
+
# parser.
|
4 |
+
#
|
5 |
+
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
|
6 |
+
# implicit_document ::= block_node DOCUMENT-END*
|
7 |
+
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
|
8 |
+
# block_node_or_indentless_sequence ::=
|
9 |
+
# ALIAS
|
10 |
+
# | properties (block_content | indentless_block_sequence)?
|
11 |
+
# | block_content
|
12 |
+
# | indentless_block_sequence
|
13 |
+
# block_node ::= ALIAS
|
14 |
+
# | properties block_content?
|
15 |
+
# | block_content
|
16 |
+
# flow_node ::= ALIAS
|
17 |
+
# | properties flow_content?
|
18 |
+
# | flow_content
|
19 |
+
# properties ::= TAG ANCHOR? | ANCHOR TAG?
|
20 |
+
# block_content ::= block_collection | flow_collection | SCALAR
|
21 |
+
# flow_content ::= flow_collection | SCALAR
|
22 |
+
# block_collection ::= block_sequence | block_mapping
|
23 |
+
# flow_collection ::= flow_sequence | flow_mapping
|
24 |
+
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
|
25 |
+
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
|
26 |
+
# block_mapping ::= BLOCK-MAPPING_START
|
27 |
+
# ((KEY block_node_or_indentless_sequence?)?
|
28 |
+
# (VALUE block_node_or_indentless_sequence?)?)*
|
29 |
+
# BLOCK-END
|
30 |
+
# flow_sequence ::= FLOW-SEQUENCE-START
|
31 |
+
# (flow_sequence_entry FLOW-ENTRY)*
|
32 |
+
# flow_sequence_entry?
|
33 |
+
# FLOW-SEQUENCE-END
|
34 |
+
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
|
35 |
+
# flow_mapping ::= FLOW-MAPPING-START
|
36 |
+
# (flow_mapping_entry FLOW-ENTRY)*
|
37 |
+
# flow_mapping_entry?
|
38 |
+
# FLOW-MAPPING-END
|
39 |
+
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
|
40 |
+
#
|
41 |
+
# FIRST sets:
|
42 |
+
#
|
43 |
+
# stream: { STREAM-START }
|
44 |
+
# explicit_document: { DIRECTIVE DOCUMENT-START }
|
45 |
+
# implicit_document: FIRST(block_node)
|
46 |
+
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
47 |
+
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
48 |
+
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
|
49 |
+
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
|
50 |
+
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
|
51 |
+
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
52 |
+
# block_sequence: { BLOCK-SEQUENCE-START }
|
53 |
+
# block_mapping: { BLOCK-MAPPING-START }
|
54 |
+
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
|
55 |
+
# indentless_sequence: { ENTRY }
|
56 |
+
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
57 |
+
# flow_sequence: { FLOW-SEQUENCE-START }
|
58 |
+
# flow_mapping: { FLOW-MAPPING-START }
|
59 |
+
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
|
60 |
+
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
|
61 |
+
|
62 |
+
__all__ = ['Parser', 'ParserError']
|
63 |
+
|
64 |
+
from .error import MarkedYAMLError
|
65 |
+
from .tokens import *
|
66 |
+
from .events import *
|
67 |
+
from .scanner import *
|
68 |
+
|
69 |
+
class ParserError(MarkedYAMLError):
|
70 |
+
pass
|
71 |
+
|
72 |
+
class Parser:
|
73 |
+
# Since writing a recursive-descendant parser is a straightforward task, we
|
74 |
+
# do not give many comments here.
|
75 |
+
|
76 |
+
DEFAULT_TAGS = {
|
77 |
+
'!': '!',
|
78 |
+
'!!': 'tag:yaml.org,2002:',
|
79 |
+
}
|
80 |
+
|
81 |
+
def __init__(self):
|
82 |
+
self.current_event = None
|
83 |
+
self.yaml_version = None
|
84 |
+
self.tag_handles = {}
|
85 |
+
self.states = []
|
86 |
+
self.marks = []
|
87 |
+
self.state = self.parse_stream_start
|
88 |
+
|
89 |
+
def dispose(self):
|
90 |
+
# Reset the state attributes (to clear self-references)
|
91 |
+
self.states = []
|
92 |
+
self.state = None
|
93 |
+
|
94 |
+
def check_event(self, *choices):
|
95 |
+
# Check the type of the next event.
|
96 |
+
if self.current_event is None:
|
97 |
+
if self.state:
|
98 |
+
self.current_event = self.state()
|
99 |
+
if self.current_event is not None:
|
100 |
+
if not choices:
|
101 |
+
return True
|
102 |
+
for choice in choices:
|
103 |
+
if isinstance(self.current_event, choice):
|
104 |
+
return True
|
105 |
+
return False
|
106 |
+
|
107 |
+
def peek_event(self):
|
108 |
+
# Get the next event.
|
109 |
+
if self.current_event is None:
|
110 |
+
if self.state:
|
111 |
+
self.current_event = self.state()
|
112 |
+
return self.current_event
|
113 |
+
|
114 |
+
def get_event(self):
|
115 |
+
# Get the next event and proceed further.
|
116 |
+
if self.current_event is None:
|
117 |
+
if self.state:
|
118 |
+
self.current_event = self.state()
|
119 |
+
value = self.current_event
|
120 |
+
self.current_event = None
|
121 |
+
return value
|
122 |
+
|
123 |
+
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
|
124 |
+
# implicit_document ::= block_node DOCUMENT-END*
|
125 |
+
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
|
126 |
+
|
127 |
+
def parse_stream_start(self):
|
128 |
+
|
129 |
+
# Parse the stream start.
|
130 |
+
token = self.get_token()
|
131 |
+
event = StreamStartEvent(token.start_mark, token.end_mark,
|
132 |
+
encoding=token.encoding)
|
133 |
+
|
134 |
+
# Prepare the next state.
|
135 |
+
self.state = self.parse_implicit_document_start
|
136 |
+
|
137 |
+
return event
|
138 |
+
|
139 |
+
def parse_implicit_document_start(self):
|
140 |
+
|
141 |
+
# Parse an implicit document.
|
142 |
+
if not self.check_token(DirectiveToken, DocumentStartToken,
|
143 |
+
StreamEndToken):
|
144 |
+
self.tag_handles = self.DEFAULT_TAGS
|
145 |
+
token = self.peek_token()
|
146 |
+
start_mark = end_mark = token.start_mark
|
147 |
+
event = DocumentStartEvent(start_mark, end_mark,
|
148 |
+
explicit=False)
|
149 |
+
|
150 |
+
# Prepare the next state.
|
151 |
+
self.states.append(self.parse_document_end)
|
152 |
+
self.state = self.parse_block_node
|
153 |
+
|
154 |
+
return event
|
155 |
+
|
156 |
+
else:
|
157 |
+
return self.parse_document_start()
|
158 |
+
|
159 |
+
def parse_document_start(self):
|
160 |
+
|
161 |
+
# Parse any extra document end indicators.
|
162 |
+
while self.check_token(DocumentEndToken):
|
163 |
+
self.get_token()
|
164 |
+
|
165 |
+
# Parse an explicit document.
|
166 |
+
if not self.check_token(StreamEndToken):
|
167 |
+
token = self.peek_token()
|
168 |
+
start_mark = token.start_mark
|
169 |
+
version, tags = self.process_directives()
|
170 |
+
if not self.check_token(DocumentStartToken):
|
171 |
+
raise ParserError(None, None,
|
172 |
+
"expected '<document start>', but found %r"
|
173 |
+
% self.peek_token().id,
|
174 |
+
self.peek_token().start_mark)
|
175 |
+
token = self.get_token()
|
176 |
+
end_mark = token.end_mark
|
177 |
+
event = DocumentStartEvent(start_mark, end_mark,
|
178 |
+
explicit=True, version=version, tags=tags)
|
179 |
+
self.states.append(self.parse_document_end)
|
180 |
+
self.state = self.parse_document_content
|
181 |
+
else:
|
182 |
+
# Parse the end of the stream.
|
183 |
+
token = self.get_token()
|
184 |
+
event = StreamEndEvent(token.start_mark, token.end_mark)
|
185 |
+
assert not self.states
|
186 |
+
assert not self.marks
|
187 |
+
self.state = None
|
188 |
+
return event
|
189 |
+
|
190 |
+
def parse_document_end(self):
|
191 |
+
|
192 |
+
# Parse the document end.
|
193 |
+
token = self.peek_token()
|
194 |
+
start_mark = end_mark = token.start_mark
|
195 |
+
explicit = False
|
196 |
+
if self.check_token(DocumentEndToken):
|
197 |
+
token = self.get_token()
|
198 |
+
end_mark = token.end_mark
|
199 |
+
explicit = True
|
200 |
+
event = DocumentEndEvent(start_mark, end_mark,
|
201 |
+
explicit=explicit)
|
202 |
+
|
203 |
+
# Prepare the next state.
|
204 |
+
self.state = self.parse_document_start
|
205 |
+
|
206 |
+
return event
|
207 |
+
|
208 |
+
def parse_document_content(self):
|
209 |
+
if self.check_token(DirectiveToken,
|
210 |
+
DocumentStartToken, DocumentEndToken, StreamEndToken):
|
211 |
+
event = self.process_empty_scalar(self.peek_token().start_mark)
|
212 |
+
self.state = self.states.pop()
|
213 |
+
return event
|
214 |
+
else:
|
215 |
+
return self.parse_block_node()
|
216 |
+
|
217 |
+
def process_directives(self):
|
218 |
+
self.yaml_version = None
|
219 |
+
self.tag_handles = {}
|
220 |
+
while self.check_token(DirectiveToken):
|
221 |
+
token = self.get_token()
|
222 |
+
if token.name == 'YAML':
|
223 |
+
if self.yaml_version is not None:
|
224 |
+
raise ParserError(None, None,
|
225 |
+
"found duplicate YAML directive", token.start_mark)
|
226 |
+
major, minor = token.value
|
227 |
+
if major != 1:
|
228 |
+
raise ParserError(None, None,
|
229 |
+
"found incompatible YAML document (version 1.* is required)",
|
230 |
+
token.start_mark)
|
231 |
+
self.yaml_version = token.value
|
232 |
+
elif token.name == 'TAG':
|
233 |
+
handle, prefix = token.value
|
234 |
+
if handle in self.tag_handles:
|
235 |
+
raise ParserError(None, None,
|
236 |
+
"duplicate tag handle %r" % handle,
|
237 |
+
token.start_mark)
|
238 |
+
self.tag_handles[handle] = prefix
|
239 |
+
if self.tag_handles:
|
240 |
+
value = self.yaml_version, self.tag_handles.copy()
|
241 |
+
else:
|
242 |
+
value = self.yaml_version, None
|
243 |
+
for key in self.DEFAULT_TAGS:
|
244 |
+
if key not in self.tag_handles:
|
245 |
+
self.tag_handles[key] = self.DEFAULT_TAGS[key]
|
246 |
+
return value
|
247 |
+
|
248 |
+
# block_node_or_indentless_sequence ::= ALIAS
|
249 |
+
# | properties (block_content | indentless_block_sequence)?
|
250 |
+
# | block_content
|
251 |
+
# | indentless_block_sequence
|
252 |
+
# block_node ::= ALIAS
|
253 |
+
# | properties block_content?
|
254 |
+
# | block_content
|
255 |
+
# flow_node ::= ALIAS
|
256 |
+
# | properties flow_content?
|
257 |
+
# | flow_content
|
258 |
+
# properties ::= TAG ANCHOR? | ANCHOR TAG?
|
259 |
+
# block_content ::= block_collection | flow_collection | SCALAR
|
260 |
+
# flow_content ::= flow_collection | SCALAR
|
261 |
+
# block_collection ::= block_sequence | block_mapping
|
262 |
+
# flow_collection ::= flow_sequence | flow_mapping
|
263 |
+
|
264 |
+
def parse_block_node(self):
|
265 |
+
return self.parse_node(block=True)
|
266 |
+
|
267 |
+
def parse_flow_node(self):
|
268 |
+
return self.parse_node()
|
269 |
+
|
270 |
+
def parse_block_node_or_indentless_sequence(self):
|
271 |
+
return self.parse_node(block=True, indentless_sequence=True)
|
272 |
+
|
273 |
+
def parse_node(self, block=False, indentless_sequence=False):
    """Parse a single node and return the corresponding event.

    ``block`` permits block collections; ``indentless_sequence`` permits
    a sequence of '-' entries at the current indentation (used for
    block-mapping values).  Sets ``self.state`` to the parser function
    that handles whatever follows the emitted event.
    """
    if self.check_token(AliasToken):
        # An alias node is complete by itself; pop the continuation.
        token = self.get_token()
        event = AliasEvent(token.value, token.start_mark, token.end_mark)
        self.state = self.states.pop()
    else:
        anchor = None
        tag = None
        start_mark = end_mark = tag_mark = None
        # Properties may appear in either order: anchor-then-tag or
        # tag-then-anchor.  Both branches track start/end marks.
        if self.check_token(AnchorToken):
            token = self.get_token()
            start_mark = token.start_mark
            end_mark = token.end_mark
            anchor = token.value
            if self.check_token(TagToken):
                token = self.get_token()
                tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
        elif self.check_token(TagToken):
            token = self.get_token()
            start_mark = tag_mark = token.start_mark
            end_mark = token.end_mark
            tag = token.value
            if self.check_token(AnchorToken):
                token = self.get_token()
                end_mark = token.end_mark
                anchor = token.value
        if tag is not None:
            # A tag token carries a (handle, suffix) pair; resolve the
            # handle against the %TAG directives in effect.
            handle, suffix = tag
            if handle is not None:
                if handle not in self.tag_handles:
                    raise ParserError("while parsing a node", start_mark,
                            "found undefined tag handle %r" % handle,
                            tag_mark)
                tag = self.tag_handles[handle]+suffix
            else:
                tag = suffix
        #if tag == '!':
        #    raise ParserError("while parsing a node", start_mark,
        #            "found non-specific tag '!'", tag_mark,
        #            "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
        if start_mark is None:
            # No properties were present: the node starts at the next token.
            start_mark = end_mark = self.peek_token().start_mark
        event = None
        # With no tag (or the non-specific '!' tag) the resolver decides.
        implicit = (tag is None or tag == '!')
        if indentless_sequence and self.check_token(BlockEntryToken):
            end_mark = self.peek_token().end_mark
            event = SequenceStartEvent(anchor, tag, implicit,
                    start_mark, end_mark)
            self.state = self.parse_indentless_sequence_entry
        else:
            if self.check_token(ScalarToken):
                token = self.get_token()
                end_mark = token.end_mark
                # implicit is a pair: (resolvable as plain, resolvable
                # as non-plain/default str).
                if (token.plain and tag is None) or tag == '!':
                    implicit = (True, False)
                elif tag is None:
                    implicit = (False, True)
                else:
                    implicit = (False, False)
                event = ScalarEvent(anchor, tag, implicit, token.value,
                        start_mark, end_mark, style=token.style)
                self.state = self.states.pop()
            elif self.check_token(FlowSequenceStartToken):
                end_mark = self.peek_token().end_mark
                event = SequenceStartEvent(anchor, tag, implicit,
                        start_mark, end_mark, flow_style=True)
                self.state = self.parse_flow_sequence_first_entry
            elif self.check_token(FlowMappingStartToken):
                end_mark = self.peek_token().end_mark
                event = MappingStartEvent(anchor, tag, implicit,
                        start_mark, end_mark, flow_style=True)
                self.state = self.parse_flow_mapping_first_key
            elif block and self.check_token(BlockSequenceStartToken):
                end_mark = self.peek_token().start_mark
                event = SequenceStartEvent(anchor, tag, implicit,
                        start_mark, end_mark, flow_style=False)
                self.state = self.parse_block_sequence_first_entry
            elif block and self.check_token(BlockMappingStartToken):
                end_mark = self.peek_token().start_mark
                event = MappingStartEvent(anchor, tag, implicit,
                        start_mark, end_mark, flow_style=False)
                self.state = self.parse_block_mapping_first_key
            elif anchor is not None or tag is not None:
                # Empty scalars are allowed even if a tag or an anchor is
                # specified.
                event = ScalarEvent(anchor, tag, (implicit, False), '',
                        start_mark, end_mark)
                self.state = self.states.pop()
            else:
                # No valid node content at all: report in context.
                if block:
                    node = 'block'
                else:
                    node = 'flow'
                token = self.peek_token()
                raise ParserError("while parsing a %s node" % node, start_mark,
                        "expected the node content, but found %r" % token.id,
                        token.start_mark)
    return event
|
373 |
+
|
374 |
+
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
|
375 |
+
|
376 |
+
def parse_block_sequence_first_entry(self):
    # Consume BLOCK-SEQUENCE-START and remember where the collection
    # began, for error reporting while inside it.
    start_token = self.get_token()
    self.marks.append(start_token.start_mark)
    return self.parse_block_sequence_entry()
|
380 |
+
|
381 |
+
def parse_block_sequence_entry(self):
    """Emit the next entry of a block sequence, or its SequenceEndEvent."""
    if self.check_token(BlockEntryToken):
        entry = self.get_token()
        if self.check_token(BlockEntryToken, BlockEndToken):
            # '-' immediately followed by another '-' or the block end:
            # this entry is an empty scalar.
            self.state = self.parse_block_sequence_entry
            return self.process_empty_scalar(entry.end_mark)
        self.states.append(self.parse_block_sequence_entry)
        return self.parse_block_node()
    if not self.check_token(BlockEndToken):
        unexpected = self.peek_token()
        raise ParserError("while parsing a block collection", self.marks[-1],
                "expected <block end>, but found %r" % unexpected.id,
                unexpected.start_mark)
    end_token = self.get_token()
    self.state = self.states.pop()
    self.marks.pop()
    return SequenceEndEvent(end_token.start_mark, end_token.end_mark)
|
399 |
+
|
400 |
+
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
|
401 |
+
|
402 |
+
def parse_indentless_sequence_entry(self):
    """Emit the next entry of an indentless sequence.

    Unlike a regular block sequence there is no BLOCK-END token: the
    sequence ends at the first token that is not BLOCK-ENTRY, and that
    token is left unconsumed.
    """
    if self.check_token(BlockEntryToken):
        entry = self.get_token()
        if self.check_token(BlockEntryToken,
                KeyToken, ValueToken, BlockEndToken):
            # '-' with no node following it: empty scalar entry.
            self.state = self.parse_indentless_sequence_entry
            return self.process_empty_scalar(entry.end_mark)
        self.states.append(self.parse_indentless_sequence_entry)
        return self.parse_block_node()
    terminator = self.peek_token()
    self.state = self.states.pop()
    return SequenceEndEvent(terminator.start_mark, terminator.start_mark)
|
416 |
+
|
417 |
+
# block_mapping ::= BLOCK-MAPPING_START
|
418 |
+
# ((KEY block_node_or_indentless_sequence?)?
|
419 |
+
# (VALUE block_node_or_indentless_sequence?)?)*
|
420 |
+
# BLOCK-END
|
421 |
+
|
422 |
+
def parse_block_mapping_first_key(self):
    # Consume BLOCK-MAPPING-START and remember where the mapping began,
    # for error reporting while inside it.
    start_token = self.get_token()
    self.marks.append(start_token.start_mark)
    return self.parse_block_mapping_key()
|
426 |
+
|
427 |
+
def parse_block_mapping_key(self):
    """Emit the next key of a block mapping, or its MappingEndEvent."""
    if self.check_token(KeyToken):
        key_token = self.get_token()
        if self.check_token(KeyToken, ValueToken, BlockEndToken):
            # '?' with no node following it: the key is an empty scalar.
            self.state = self.parse_block_mapping_value
            return self.process_empty_scalar(key_token.end_mark)
        self.states.append(self.parse_block_mapping_value)
        return self.parse_block_node_or_indentless_sequence()
    if not self.check_token(BlockEndToken):
        unexpected = self.peek_token()
        raise ParserError("while parsing a block mapping", self.marks[-1],
                "expected <block end>, but found %r" % unexpected.id,
                unexpected.start_mark)
    end_token = self.get_token()
    self.state = self.states.pop()
    self.marks.pop()
    return MappingEndEvent(end_token.start_mark, end_token.end_mark)
|
445 |
+
|
446 |
+
def parse_block_mapping_value(self):
    """Emit the value of a block-mapping pair; empty scalar when absent."""
    if not self.check_token(ValueToken):
        # No ':' token at all: the value is missing entirely.
        self.state = self.parse_block_mapping_key
        return self.process_empty_scalar(self.peek_token().start_mark)
    value_token = self.get_token()
    if self.check_token(KeyToken, ValueToken, BlockEndToken):
        # ':' with no node following it: empty scalar value.
        self.state = self.parse_block_mapping_key
        return self.process_empty_scalar(value_token.end_mark)
    self.states.append(self.parse_block_mapping_key)
    return self.parse_block_node_or_indentless_sequence()
|
459 |
+
|
460 |
+
# flow_sequence ::= FLOW-SEQUENCE-START
|
461 |
+
# (flow_sequence_entry FLOW-ENTRY)*
|
462 |
+
# flow_sequence_entry?
|
463 |
+
# FLOW-SEQUENCE-END
|
464 |
+
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
|
465 |
+
#
|
466 |
+
# Note that while production rules for both flow_sequence_entry and
|
467 |
+
# flow_mapping_entry are equal, their interpretations are different.
|
468 |
+
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
|
469 |
+
# generate an inline mapping (set syntax).
|
470 |
+
|
471 |
+
def parse_flow_sequence_first_entry(self):
    # Consume '[' and remember where the flow sequence began, for
    # error reporting while inside it.
    start_token = self.get_token()
    self.marks.append(start_token.start_mark)
    return self.parse_flow_sequence_entry(first=True)
|
475 |
+
|
476 |
+
def parse_flow_sequence_entry(self, first=False):
    """Emit the next entry of a flow sequence, or its SequenceEndEvent.

    ``first`` suppresses the requirement for a leading ',' separator.
    A KEY token inside a flow sequence starts an implicit single-pair
    mapping entry.
    """
    if not self.check_token(FlowSequenceEndToken):
        if not first:
            # Every entry after the first must be preceded by ','.
            if self.check_token(FlowEntryToken):
                self.get_token()
            else:
                token = self.peek_token()
                raise ParserError("while parsing a flow sequence", self.marks[-1],
                        "expected ',' or ']', but got %r" % token.id, token.start_mark)

        if self.check_token(KeyToken):
            # '?' inside '[...]' starts an inline single-pair mapping;
            # the KEY token itself is consumed by the next state.
            token = self.peek_token()
            event = MappingStartEvent(None, None, True,
                    token.start_mark, token.end_mark,
                    flow_style=True)
            self.state = self.parse_flow_sequence_entry_mapping_key
            return event
        elif not self.check_token(FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry)
            return self.parse_flow_node()
    # ']' reached (possibly right after a trailing ',').
    token = self.get_token()
    event = SequenceEndEvent(token.start_mark, token.end_mark)
    self.state = self.states.pop()
    self.marks.pop()
    return event
|
501 |
+
|
502 |
+
def parse_flow_sequence_entry_mapping_key(self):
    # Consume the KEY token of a single-pair mapping inside a flow
    # sequence and parse its key node.
    key_token = self.get_token()
    if self.check_token(ValueToken,
            FlowEntryToken, FlowSequenceEndToken):
        # No key node present: the key is an empty scalar.
        self.state = self.parse_flow_sequence_entry_mapping_value
        return self.process_empty_scalar(key_token.end_mark)
    self.states.append(self.parse_flow_sequence_entry_mapping_value)
    return self.parse_flow_node()
|
511 |
+
|
512 |
+
def parse_flow_sequence_entry_mapping_value(self):
    # Parse the value of a single-pair mapping inside a flow sequence;
    # the value is an empty scalar whenever it is omitted.
    if not self.check_token(ValueToken):
        self.state = self.parse_flow_sequence_entry_mapping_end
        return self.process_empty_scalar(self.peek_token().start_mark)
    value_token = self.get_token()
    if self.check_token(FlowEntryToken, FlowSequenceEndToken):
        # ':' directly followed by ',' or ']': empty scalar value.
        self.state = self.parse_flow_sequence_entry_mapping_end
        return self.process_empty_scalar(value_token.end_mark)
    self.states.append(self.parse_flow_sequence_entry_mapping_end)
    return self.parse_flow_node()
|
525 |
+
|
526 |
+
def parse_flow_sequence_entry_mapping_end(self):
    # Close the implicit single-pair mapping; no token is consumed,
    # so the end event is zero-width at the next token's position.
    self.state = self.parse_flow_sequence_entry
    mark = self.peek_token().start_mark
    return MappingEndEvent(mark, mark)
|
530 |
+
|
531 |
+
# flow_mapping ::= FLOW-MAPPING-START
|
532 |
+
# (flow_mapping_entry FLOW-ENTRY)*
|
533 |
+
# flow_mapping_entry?
|
534 |
+
# FLOW-MAPPING-END
|
535 |
+
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
|
536 |
+
|
537 |
+
def parse_flow_mapping_first_key(self):
    # Consume '{' and remember where the flow mapping began, for
    # error reporting while inside it.
    start_token = self.get_token()
    self.marks.append(start_token.start_mark)
    return self.parse_flow_mapping_key(first=True)
|
541 |
+
|
542 |
+
def parse_flow_mapping_key(self, first=False):
    """Emit the next key of a flow mapping, or its MappingEndEvent.

    ``first`` suppresses the requirement for a leading ',' separator.
    A node without a KEY token is a key whose value is an empty scalar.
    """
    if not self.check_token(FlowMappingEndToken):
        if not first:
            # Every entry after the first must be preceded by ','.
            if self.check_token(FlowEntryToken):
                self.get_token()
            else:
                token = self.peek_token()
                raise ParserError("while parsing a flow mapping", self.marks[-1],
                        "expected ',' or '}', but got %r" % token.id, token.start_mark)
        if self.check_token(KeyToken):
            token = self.get_token()
            if not self.check_token(ValueToken,
                    FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_value)
                return self.parse_flow_node()
            else:
                # '?' with no key node following: empty scalar key.
                self.state = self.parse_flow_mapping_value
                return self.process_empty_scalar(token.end_mark)
        elif not self.check_token(FlowMappingEndToken):
            # Bare node used as a key; its value will be empty.
            self.states.append(self.parse_flow_mapping_empty_value)
            return self.parse_flow_node()
    # '}' reached (possibly right after a trailing ',').
    token = self.get_token()
    event = MappingEndEvent(token.start_mark, token.end_mark)
    self.state = self.states.pop()
    self.marks.pop()
    return event
|
568 |
+
|
569 |
+
def parse_flow_mapping_value(self):
    """Emit the value of a flow-mapping pair; empty scalar when absent."""
    if not self.check_token(ValueToken):
        # No ':' token at all: the value is missing entirely.
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(self.peek_token().start_mark)
    value_token = self.get_token()
    if self.check_token(FlowEntryToken, FlowMappingEndToken):
        # ':' directly followed by ',' or '}': empty scalar value.
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(value_token.end_mark)
    self.states.append(self.parse_flow_mapping_key)
    return self.parse_flow_node()
|
582 |
+
|
583 |
+
def parse_flow_mapping_empty_value(self):
    # A bare key inside '{...}' with no ':' at all — its value is an
    # empty scalar at the current position.
    self.state = self.parse_flow_mapping_key
    mark = self.peek_token().start_mark
    return self.process_empty_scalar(mark)
|
586 |
+
|
587 |
+
def process_empty_scalar(self, mark):
    # Fabricate a zero-width plain empty scalar event at *mark*,
    # implicitly resolvable as a plain scalar.
    implicit = (True, False)
    return ScalarEvent(None, None, implicit, '', mark, mark)
|
589 |
+
|
venv/lib/python3.12/site-packages/yaml/resolver.py
ADDED
@@ -0,0 +1,227 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
__all__ = ['BaseResolver', 'Resolver']
|
3 |
+
|
4 |
+
from .error import *
|
5 |
+
from .nodes import *
|
6 |
+
|
7 |
+
import re
|
8 |
+
|
9 |
+
class ResolverError(YAMLError):
    # Raised by add_path_resolver for invalid path elements, node
    # checkers, index checkers, or node kinds.
    pass
|
11 |
+
|
12 |
+
class BaseResolver:
    """Resolve nodes to tags.

    Implicit resolvers match a plain scalar's value against registered
    regular expressions; path resolvers (experimental) match the
    position of a node within the document.  Resolution falls back to
    the DEFAULT_*_TAG for the node kind.
    """

    DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str'
    DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq'
    DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map'

    # Class-level registries; copied on first write into a subclass so
    # that registering on a subclass never mutates the parent's tables.
    yaml_implicit_resolvers = {}
    yaml_path_resolvers = {}

    def __init__(self):
        # Per-instance stacks tracking the current document path,
        # maintained by descend_resolver/ascend_resolver.
        self.resolver_exact_paths = []
        self.resolver_prefix_paths = []

    @classmethod
    def add_implicit_resolver(cls, tag, regexp, first):
        """Register *regexp* so matching plain scalars resolve to *tag*.

        *first* lists the characters a matching value may start with
        (used to index the table); ``None`` registers a wildcard entry
        consulted for every value.
        """
        # PEP 8 idiom: 'x not in d' instead of 'not x in d'.
        if 'yaml_implicit_resolvers' not in cls.__dict__:
            # Copy-on-write: give this class its own table, with the
            # per-character lists copied so appends stay local.
            implicit_resolvers = {}
            for key in cls.yaml_implicit_resolvers:
                implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:]
            cls.yaml_implicit_resolvers = implicit_resolvers
        if first is None:
            first = [None]
        for ch in first:
            cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))

    @classmethod
    def add_path_resolver(cls, tag, path, kind=None):
        """Register *tag* for nodes whose document path matches *path*.

        Note: `add_path_resolver` is experimental.  The API could be changed.
        `new_path` is a pattern that is matched against the path from the
        root to the node that is being considered.  `node_path` elements are
        tuples `(node_check, index_check)`.  `node_check` is a node class:
        `ScalarNode`, `SequenceNode`, `MappingNode` or `None`.  `None`
        matches any kind of a node.  `index_check` could be `None`, a boolean
        value, a string value, or a number.  `None` and `False` match against
        any _value_ of sequence and mapping nodes.  `True` matches against
        any _key_ of a mapping node.  A string `index_check` matches against
        a mapping value that corresponds to a scalar key which content is
        equal to the `index_check` value.  An integer `index_check` matches
        against a sequence value with the index equal to `index_check`.
        """
        if 'yaml_path_resolvers' not in cls.__dict__:
            # Copy-on-write, as in add_implicit_resolver.
            cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
        new_path = []
        for element in path:
            if isinstance(element, (list, tuple)):
                if len(element) == 2:
                    node_check, index_check = element
                elif len(element) == 1:
                    node_check = element[0]
                    index_check = True
                else:
                    raise ResolverError("Invalid path element: %s" % element)
            else:
                node_check = None
                index_check = element
            # Accept the builtin types str/list/dict as shorthands for
            # the corresponding node classes.
            if node_check is str:
                node_check = ScalarNode
            elif node_check is list:
                node_check = SequenceNode
            elif node_check is dict:
                node_check = MappingNode
            elif node_check not in [ScalarNode, SequenceNode, MappingNode]  \
                    and not isinstance(node_check, str)  \
                    and node_check is not None:
                raise ResolverError("Invalid node checker: %s" % node_check)
            if not isinstance(index_check, (str, int))   \
                    and index_check is not None:
                raise ResolverError("Invalid index checker: %s" % index_check)
            new_path.append((node_check, index_check))
        if kind is str:
            kind = ScalarNode
        elif kind is list:
            kind = SequenceNode
        elif kind is dict:
            kind = MappingNode
        elif kind not in [ScalarNode, SequenceNode, MappingNode]    \
                and kind is not None:
            raise ResolverError("Invalid node kind: %s" % kind)
        cls.yaml_path_resolvers[tuple(new_path), kind] = tag

    def descend_resolver(self, current_node, current_index):
        """Push path-resolver state for a child of *current_node*.

        No-op when no path resolvers are registered.
        """
        if not self.yaml_path_resolvers:
            return
        exact_paths = {}
        prefix_paths = []
        if current_node:
            depth = len(self.resolver_prefix_paths)
            for path, kind in self.resolver_prefix_paths[-1]:
                if self.check_resolver_prefix(depth, path, kind,
                        current_node, current_index):
                    # Fully-matched paths become exact candidates; longer
                    # ones remain prefixes to extend at the next level.
                    if len(path) > depth:
                        prefix_paths.append((path, kind))
                    else:
                        exact_paths[kind] = self.yaml_path_resolvers[path, kind]
        else:
            # At the document root every registered path is a candidate.
            for path, kind in self.yaml_path_resolvers:
                if not path:
                    exact_paths[kind] = self.yaml_path_resolvers[path, kind]
                else:
                    prefix_paths.append((path, kind))
        self.resolver_exact_paths.append(exact_paths)
        self.resolver_prefix_paths.append(prefix_paths)

    def ascend_resolver(self):
        """Pop the path-resolver state pushed by descend_resolver."""
        if not self.yaml_path_resolvers:
            return
        self.resolver_exact_paths.pop()
        self.resolver_prefix_paths.pop()

    def check_resolver_prefix(self, depth, path, kind,
            current_node, current_index):
        """Return True if path[depth-1] matches the current node/index."""
        node_check, index_check = path[depth-1]
        if isinstance(node_check, str):
            # A string node checker matches the node's tag.
            if current_node.tag != node_check:
                return
        elif node_check is not None:
            if not isinstance(current_node, node_check):
                return
        # True matches mapping keys (index is None); None/False match values.
        if index_check is True and current_index is not None:
            return
        if (index_check is False or index_check is None)    \
                and current_index is None:
            return
        if isinstance(index_check, str):
            if not (isinstance(current_index, ScalarNode)
                    and index_check == current_index.value):
                return
        elif isinstance(index_check, int) and not isinstance(index_check, bool):
            if index_check != current_index:
                return
        return True

    def resolve(self, kind, value, implicit):
        """Return the tag for a node of *kind* with content *value*.

        *implicit* is the (plain, non-plain) implicit pair from the
        parser; implicit resolution by regexp applies only to plain
        scalars.
        """
        if kind is ScalarNode and implicit[0]:
            if value == '':
                resolvers = self.yaml_implicit_resolvers.get('', [])
            else:
                resolvers = self.yaml_implicit_resolvers.get(value[0], [])
            wildcard_resolvers = self.yaml_implicit_resolvers.get(None, [])
            for tag, regexp in resolvers + wildcard_resolvers:
                if regexp.match(value):
                    return tag
            implicit = implicit[1]
        if self.yaml_path_resolvers:
            exact_paths = self.resolver_exact_paths[-1]
            if kind in exact_paths:
                return exact_paths[kind]
            if None in exact_paths:
                return exact_paths[None]
        if kind is ScalarNode:
            return self.DEFAULT_SCALAR_TAG
        elif kind is SequenceNode:
            return self.DEFAULT_SEQUENCE_TAG
        elif kind is MappingNode:
            return self.DEFAULT_MAPPING_TAG
|
166 |
+
|
167 |
+
class Resolver(BaseResolver):
    # Concrete resolver; the module-level add_implicit_resolver calls
    # that follow populate it with the standard YAML 1.1 scalar types.
    pass
|
169 |
+
|
170 |
+
# Standard YAML 1.1 implicit scalar resolvers.  Each registration maps
# a verbose regular expression to a tag; the final argument lists the
# characters a matching value may start with, used to index the table.

# Booleans: yes/no, true/false, on/off in three capitalizations.
Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:bool',
        re.compile(r'''^(?:yes|Yes|YES|no|No|NO
                    |true|True|TRUE|false|False|FALSE
                    |on|On|ON|off|Off|OFF)$''', re.X),
        list('yYnNtTfFoO'))

# Floats, including sexagesimal (base-60, ':'-separated), inf and nan.
Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:float',
        re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
                    |\.[0-9][0-9_]*(?:[eE][-+][0-9]+)?
                    |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
                    |[-+]?\.(?:inf|Inf|INF)
                    |\.(?:nan|NaN|NAN))$''', re.X),
        list('-+0123456789.'))

# Integers: binary, octal (leading 0), decimal, hex, and sexagesimal.
Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:int',
        re.compile(r'''^(?:[-+]?0b[0-1_]+
                    |[-+]?0[0-7_]+
                    |[-+]?(?:0|[1-9][0-9_]*)
                    |[-+]?0x[0-9a-fA-F_]+
                    |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
        list('-+0123456789'))

# The '<<' merge key.
Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:merge',
        re.compile(r'^(?:<<)$'),
        ['<'])

# Null: '~', the word null, or the empty string (re.X ignores the
# literal spaces in the pattern, so the last alternative is empty).
Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:null',
        re.compile(r'''^(?: ~
                    |null|Null|NULL
                    | )$''', re.X),
        ['~', 'n', 'N', ''])

# ISO 8601-style timestamps, date-only or with time and optional zone.
Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:timestamp',
        re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
                    |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
                     (?:[Tt]|[ \t]+)[0-9][0-9]?
                     :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
                     (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
        list('0123456789'))

# The '=' default-value key.
Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:value',
        re.compile(r'^(?:=)$'),
        ['='])

# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
Resolver.add_implicit_resolver(
        'tag:yaml.org,2002:yaml',
        re.compile(r'^(?:!|&|\*)$'),
        list('!&*'))
|
227 |
+
|
venv/lib/python3.12/site-packages/yaml/serializer.py
ADDED
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
__all__ = ['Serializer', 'SerializerError']
|
3 |
+
|
4 |
+
from .error import YAMLError
|
5 |
+
from .events import *
|
6 |
+
from .nodes import *
|
7 |
+
|
8 |
+
class SerializerError(YAMLError):
    # Raised on misuse of the Serializer lifecycle (serializing before
    # open() or after close(), or re-opening a serializer).
    pass
|
10 |
+
|
11 |
+
class Serializer:
    """Turn node trees into an event stream.

    Lifecycle: open() emits StreamStartEvent, serialize(node) may then
    be called once per document, close() emits StreamEndEvent.  Nodes
    that occur more than once in a tree are given generated anchors and
    emitted as aliases on subsequent occurrences.
    """

    # printf-style template for generated anchor names (id001, id002, ...).
    ANCHOR_TEMPLATE = 'id%03d'

    def __init__(self, encoding=None,
            explicit_start=None, explicit_end=None, version=None, tags=None):
        self.use_encoding = encoding
        self.use_explicit_start = explicit_start
        self.use_explicit_end = explicit_end
        self.use_version = version
        self.use_tags = tags
        # Per-document state, reset at the end of each serialize() call.
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0
        # None = never opened, False = open, True = closed.
        self.closed = None

    def open(self):
        """Emit StreamStartEvent; may be called only once."""
        if self.closed is None:
            self.emit(StreamStartEvent(encoding=self.use_encoding))
            self.closed = False
        elif self.closed:
            raise SerializerError("serializer is closed")
        else:
            raise SerializerError("serializer is already opened")

    def close(self):
        """Emit StreamEndEvent; idempotent once the stream is open."""
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif not self.closed:
            self.emit(StreamEndEvent())
            self.closed = True

    #def __del__(self):
    #    self.close()

    def serialize(self, node):
        """Serialize one document rooted at *node*.

        Raises SerializerError unless the serializer is open.  Runs two
        passes: anchor_node to assign anchors to shared nodes, then
        serialize_node to emit the events.
        """
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif self.closed:
            raise SerializerError("serializer is closed")
        self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
            version=self.use_version, tags=self.use_tags))
        self.anchor_node(node)
        self.serialize_node(node, None, None)
        self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
        # Reset per-document state for the next serialize() call.
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0

    def anchor_node(self, node):
        """First pass: record nodes, generating anchors for repeats.

        A node seen once maps to None; on a second visit it receives a
        generated anchor name.  Children are only walked on the first
        visit, so shared subtrees are traversed once.
        """
        if node in self.anchors:
            if self.anchors[node] is None:
                self.anchors[node] = self.generate_anchor(node)
        else:
            self.anchors[node] = None
            if isinstance(node, SequenceNode):
                for item in node.value:
                    self.anchor_node(item)
            elif isinstance(node, MappingNode):
                for key, value in node.value:
                    self.anchor_node(key)
                    self.anchor_node(value)

    def generate_anchor(self, node):
        # Sequentially numbered anchors from ANCHOR_TEMPLATE.
        self.last_anchor_id += 1
        return self.ANCHOR_TEMPLATE % self.last_anchor_id

    def serialize_node(self, node, parent, index):
        """Second pass: emit events for *node* (and its children).

        Repeated nodes are emitted as AliasEvent.  *parent* and *index*
        feed the resolver's path tracking; the implicit flags mark
        whether the node's tag matches what the resolver would infer.
        """
        alias = self.anchors[node]
        if node in self.serialized_nodes:
            self.emit(AliasEvent(alias))
        else:
            self.serialized_nodes[node] = True
            self.descend_resolver(parent, index)
            if isinstance(node, ScalarNode):
                # Compare the node's tag against what the resolver would
                # choose for a plain scalar and for a quoted one.
                detected_tag = self.resolve(ScalarNode, node.value, (True, False))
                default_tag = self.resolve(ScalarNode, node.value, (False, True))
                implicit = (node.tag == detected_tag), (node.tag == default_tag)
                self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
                    style=node.style))
            elif isinstance(node, SequenceNode):
                implicit = (node.tag
                            == self.resolve(SequenceNode, node.value, True))
                self.emit(SequenceStartEvent(alias, node.tag, implicit,
                    flow_style=node.flow_style))
                index = 0
                for item in node.value:
                    self.serialize_node(item, node, index)
                    index += 1
                self.emit(SequenceEndEvent())
            elif isinstance(node, MappingNode):
                implicit = (node.tag
                            == self.resolve(MappingNode, node.value, True))
                self.emit(MappingStartEvent(alias, node.tag, implicit,
                    flow_style=node.flow_style))
                for key, value in node.value:
                    self.serialize_node(key, node, None)
                    self.serialize_node(value, node, key)
                self.emit(MappingEndEvent())
            self.ascend_resolver()
|
111 |
+
|