Mercurial repository: bcclaywell / argo_navis
File: venv/lib/python2.7/site-packages/yaml/__init__.py @ 0:d67268158946 (draft)
Commit message: planemo upload commit a3f181f5f126803c654b3a66dd4e83a48f7e203b

author:   bcclaywell
date:     Mon, 12 Oct 2015 17:43:33 -0400
parents:  (none)
children: (none)

Comparison: -1:000000000000 -> 0:d67268158946 (file added in this changeset)

from error import *

from tokens import *
from events import *
from nodes import *

from loader import *
from dumper import *

__version__ = '3.11'

try:
    from cyaml import *
    __with_libyaml__ = True
except ImportError:
    __with_libyaml__ = False
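# Usage sketch (illustrative): callers can check __with_libyaml__ to pick the
# faster LibYAML-backed classes when they are available.
#
#     import yaml
#     BestLoader = yaml.CLoader if yaml.__with_libyaml__ else yaml.Loader
#     data = yaml.load("a: 1", Loader=BestLoader)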
def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    try:
        while loader.check_token():
            yield loader.get_token()
    finally:
        loader.dispose()

def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    try:
        while loader.check_event():
            yield loader.get_event()
    finally:
        loader.dispose()
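# Usage sketch (illustrative): scan() yields low-level tokens and parse()
# yields parsing events; both are generators bound to a single stream.
#
#     import yaml
#     for token in yaml.scan("a: 1"):
#         print token
#     for event in yaml.parse("a: 1"):
#         print event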
def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_node()
    finally:
        loader.dispose()

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    try:
        while loader.check_node():
            yield loader.get_node()
    finally:
        loader.dispose()
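# Usage sketch (illustrative): compose() returns the representation tree
# (Node objects) of the first document; compose_all() iterates over every
# document in a multi-document stream.
#
#     import yaml
#     node = yaml.compose("a: 1")            # MappingNode
#     for node in yaml.compose_all("--- 1\n--- 2\n"):
#         print node.tag, node.value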
def load(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()

def load_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    loader = Loader(stream)
    try:
        while loader.check_data():
            yield loader.get_data()
    finally:
        loader.dispose()

def safe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags.
    """
    return load(stream, SafeLoader)

def safe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags.
    """
    return load_all(stream, SafeLoader)
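# Usage sketch (illustrative): load()/load_all() build full Python objects and,
# with the default Loader, will also resolve python-specific tags, so
# safe_load()/safe_load_all() are the usual choice for untrusted input.
#
#     import yaml
#     config = yaml.safe_load("host: example.org\nport: 8080\n")
#     for doc in yaml.safe_load_all("--- {a: 1}\n--- {b: 2}\n"):
#         print doc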
def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        from StringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    try:
        for event in events:
            dumper.emit(event)
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()
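# Usage sketch (illustrative): emit() is the inverse of parse(); feeding a
# document's event stream back into it produces equivalent YAML text.
#
#     import yaml
#     text = yaml.emit(yaml.parse("a: 1"))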
def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            from StringIO import StringIO
        else:
            from cStringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)
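# Usage sketch (illustrative): serialize() writes a single representation tree
# (as produced by compose()) back out as YAML; serialize_all() does the same
# for a sequence of trees. With stream=None the YAML text is returned.
#
#     import yaml
#     node = yaml.compose("a: 1")
#     text = yaml.serialize(node)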
def dump_all(documents, stream=None, Dumper=Dumper,
        default_style=None, default_flow_style=None,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            from StringIO import StringIO
        else:
            from cStringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, default_style=default_style,
            default_flow_style=default_flow_style,
            canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)

def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)
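# Usage sketch (illustrative): dump()/dump_all() go straight from Python
# objects to YAML text, while safe_dump()/safe_dump_all() restrict the output
# to standard tags; default_flow_style=False forces block-style collections.
#
#     import yaml
#     print yaml.safe_dump({'name': 'argo', 'replicas': 3},
#                          default_flow_style=False)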
def add_implicit_resolver(tag, regexp, first=None,
        Loader=Loader, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    Loader.add_implicit_resolver(tag, regexp, first)
    Dumper.add_implicit_resolver(tag, regexp, first)

def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    Loader.add_path_resolver(tag, path, kind)
    Dumper.add_path_resolver(tag, path, kind)
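# Usage sketch (illustrative, with a made-up !dice tag): an implicit resolver
# tags plain scalars that match a regexp so that a matching constructor or
# representer can handle them.
#
#     import re, yaml
#     yaml.add_implicit_resolver(u'!dice', re.compile(r'^\d+d\d+$'),
#                                list(u'0123456789'))
#     # a constructor registered for u'!dice' (see below) would then receive
#     # plain scalars such as "3d6" automatically.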
def add_constructor(tag, constructor, Loader=Loader):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)

def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)
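# Usage sketch (illustrative, with a made-up !dice tag): a constructor maps a
# tagged node to a Python value while the document is being loaded.
#
#     import yaml
#     def dice_constructor(loader, node):
#         value = loader.construct_scalar(node)
#         count, sides = map(int, value.split('d'))
#         return (count, sides)
#     yaml.add_constructor(u'!dice', dice_constructor)
#     yaml.load("damage: !dice 3d6")   # -> {'damage': (3, 6)}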
def add_representer(data_type, representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Representer is a function accepting a Dumper instance
    and an instance of the given data type
    and producing the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)

def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Multi-representer is a function accepting a Dumper instance
    and an instance of the given data type or subtype
    and producing the corresponding representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)
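# Usage sketch (illustrative, with a made-up Dice class): a representer maps
# instances of a Python type back to a tagged node when dumping;
# add_multi_representer() also covers subclasses of the registered type.
#
#     import yaml
#     class Dice(tuple):
#         pass
#     def dice_representer(dumper, data):
#         return dumper.represent_scalar(u'!dice', u'%dd%d' % data)
#     yaml.add_representer(Dice, dice_representer)
#     yaml.dump({'damage': Dice((3, 6))})   # e.g. "{damage: !dice '3d6'}\n"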
class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(object):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """

    __metaclass__ = YAMLObjectMetaclass
    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    yaml_loader = Loader
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)
    from_yaml = classmethod(from_yaml)

    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)
    to_yaml = classmethod(to_yaml)
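# Usage sketch (illustrative, with a made-up Monster class): subclassing
# YAMLObject and setting yaml_tag registers the constructor and representer
# automatically through the metaclass above.
#
#     import yaml
#     class Monster(yaml.YAMLObject):
#         yaml_tag = u'!Monster'
#         def __init__(self, name, hp):
#             self.name = name
#             self.hp = hp
#     yaml.load("!Monster {name: Cave spider, hp: 16}")
#     yaml.dump(Monster('Dragon', 200))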