|
@@ -0,0 +1,2484 @@
|
|
1
|
+# configobj.py
|
|
2
|
+# A config file reader/writer that supports nested sections in config files.
|
|
3
|
+# Copyright (C) 2005-2014:
|
|
4
|
+# (name) : (email)
|
|
5
|
+# Michael Foord: fuzzyman AT voidspace DOT org DOT uk
|
|
6
|
+# Nicola Larosa: nico AT tekNico DOT net
|
|
7
|
+# Rob Dennis: rdennis AT gmail DOT com
|
|
8
|
+# Eli Courtwright: eli AT courtwright DOT org
|
|
9
|
+
|
|
10
|
+# This software is licensed under the terms of the BSD license.
|
|
11
|
+# http://opensource.org/licenses/BSD-3-Clause
|
|
12
|
+
|
|
13
|
+# ConfigObj 5 - main repository for documentation and issue tracking:
|
|
14
|
+# https://github.com/DiffSK/configobj
|
|
15
|
+
|
|
16
|
+import os
|
|
17
|
+import re
|
|
18
|
+import sys
|
|
19
|
+
|
|
20
|
+from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE
|
|
21
|
+
|
|
22
|
+#from six import six
|
|
23
|
+import six
|
|
24
|
+#from _version import __version__
|
|
25
|
+
|
|
26
|
+# imported lazily to avoid startup performance hit if it isn't used
|
|
27
|
+compiler = None
|
|
28
|
+
|
|
29
|
+# A dictionary mapping BOM to
|
|
30
|
+# the encoding to decode with, and what to set the
|
|
31
|
+# encoding attribute to.
|
|
32
|
+BOMS = {
|
|
33
|
+ BOM_UTF8: ('utf_8', None),
|
|
34
|
+ BOM_UTF16_BE: ('utf16_be', 'utf_16'),
|
|
35
|
+ BOM_UTF16_LE: ('utf16_le', 'utf_16'),
|
|
36
|
+ BOM_UTF16: ('utf_16', 'utf_16'),
|
|
37
|
+ }
|
|
38
|
+# All legal variants of the BOM codecs.
|
|
39
|
+# TODO: the list of aliases is not meant to be exhaustive, is there a
|
|
40
|
+# better way ?
|
|
41
|
+BOM_LIST = {
|
|
42
|
+ 'utf_16': 'utf_16',
|
|
43
|
+ 'u16': 'utf_16',
|
|
44
|
+ 'utf16': 'utf_16',
|
|
45
|
+ 'utf-16': 'utf_16',
|
|
46
|
+ 'utf16_be': 'utf16_be',
|
|
47
|
+ 'utf_16_be': 'utf16_be',
|
|
48
|
+ 'utf-16be': 'utf16_be',
|
|
49
|
+ 'utf16_le': 'utf16_le',
|
|
50
|
+ 'utf_16_le': 'utf16_le',
|
|
51
|
+ 'utf-16le': 'utf16_le',
|
|
52
|
+ 'utf_8': 'utf_8',
|
|
53
|
+ 'u8': 'utf_8',
|
|
54
|
+ 'utf': 'utf_8',
|
|
55
|
+ 'utf8': 'utf_8',
|
|
56
|
+ 'utf-8': 'utf_8',
|
|
57
|
+ }
|
|
58
|
+
|
|
59
|
+# Map of encodings to the BOM to write.
|
|
60
|
+BOM_SET = {
|
|
61
|
+ 'utf_8': BOM_UTF8,
|
|
62
|
+ 'utf_16': BOM_UTF16,
|
|
63
|
+ 'utf16_be': BOM_UTF16_BE,
|
|
64
|
+ 'utf16_le': BOM_UTF16_LE,
|
|
65
|
+ None: BOM_UTF8
|
|
66
|
+ }
|
|
67
|
+
|
|
68
|
+
|
|
69
|
def match_utf8(encoding):
    """Return True if ``encoding`` is any recognised alias of UTF-8."""
    canonical = BOM_LIST.get(encoding.lower())
    return canonical == 'utf_8'
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+# Quote strings used for writing values
|
|
74
|
+squot = "'%s'"
|
|
75
|
+dquot = '"%s"'
|
|
76
|
+noquot = "%s"
|
|
77
|
+wspace_plus = ' \r\n\v\t\'"'
|
|
78
|
+tsquot = '"""%s"""'
|
|
79
|
+tdquot = "'''%s'''"
|
|
80
|
+
|
|
81
|
+# Sentinel for use in getattr calls to replace hasattr
|
|
82
|
+MISSING = object()
|
|
83
|
+
|
|
84
|
+__all__ = (
|
|
85
|
+ 'DEFAULT_INDENT_TYPE',
|
|
86
|
+ 'DEFAULT_INTERPOLATION',
|
|
87
|
+ 'ConfigObjError',
|
|
88
|
+ 'NestingError',
|
|
89
|
+ 'ParseError',
|
|
90
|
+ 'DuplicateError',
|
|
91
|
+ 'ConfigspecError',
|
|
92
|
+ 'ConfigObj',
|
|
93
|
+ 'SimpleVal',
|
|
94
|
+ 'InterpolationError',
|
|
95
|
+ 'InterpolationLoopError',
|
|
96
|
+ 'MissingInterpolationOption',
|
|
97
|
+ 'RepeatSectionError',
|
|
98
|
+ 'ReloadError',
|
|
99
|
+ 'UnreprError',
|
|
100
|
+ 'UnknownType',
|
|
101
|
+ 'flatten_errors',
|
|
102
|
+ 'get_extra_values'
|
|
103
|
+)
|
|
104
|
+
|
|
105
|
+DEFAULT_INTERPOLATION = 'configparser'
|
|
106
|
+DEFAULT_INDENT_TYPE = ' '
|
|
107
|
+MAX_INTERPOL_DEPTH = 10
|
|
108
|
+
|
|
109
|
+OPTION_DEFAULTS = {
|
|
110
|
+ 'interpolation': True,
|
|
111
|
+ 'raise_errors': False,
|
|
112
|
+ 'list_values': True,
|
|
113
|
+ 'create_empty': False,
|
|
114
|
+ 'file_error': False,
|
|
115
|
+ 'configspec': None,
|
|
116
|
+ 'stringify': True,
|
|
117
|
+ # option may be set to one of ('', ' ', '\t')
|
|
118
|
+ 'indent_type': None,
|
|
119
|
+ 'encoding': None,
|
|
120
|
+ 'default_encoding': None,
|
|
121
|
+ 'unrepr': False,
|
|
122
|
+ 'write_empty_values': False,
|
|
123
|
+}
|
|
124
|
+
|
|
125
|
+# this could be replaced if six is used for compatibility, or there are no
|
|
126
|
+# more assertions about items being a string
|
|
127
|
+
|
|
128
|
+
|
|
129
|
def getObj(s):
    """Parse ``s`` as the right-hand side of an assignment and return the AST node.

    NOTE(review): this relies on the ``compiler`` package, which exists only
    on Python 2 (it was removed in Python 3) -- presumably a legacy path,
    since ``unrepr`` below uses ``ast.literal_eval`` instead.  Confirm this
    function is still reachable before relying on it.
    """
    global compiler
    if compiler is None:
        # imported lazily to avoid startup cost (see module-level comment)
        import compiler
    # wrap in an assignment so ``s`` parses as an expression statement
    s = "a=" + s
    p = compiler.parse(s)
    # dig the value node out of the Assign statement
    return p.getChildren()[1].getChildren()[0].getChildren()[1]
|
|
136
|
+
|
|
137
|
+
|
|
138
|
class UnknownType(Exception):
    """Raised by ``Builder`` when it meets a parse-tree node it cannot rebuild."""
|
|
140
|
+
|
|
141
|
+
|
|
142
|
class Builder(object):
    """Rebuild Python values from a ``compiler`` parse tree.

    Companion to the legacy ``getObj`` path: each ``build_<NodeName>``
    method converts one parse-tree node type back into the value it
    represents.  ``build`` dispatches on the node's class name.
    """

    def build(self, o):
        """Dispatch ``o`` to its ``build_<classname>`` handler.

        Raises ``UnknownType`` when no handler exists for the node type.
        """
        # BUG FIX: ``m`` was referenced without ever being assigned, so every
        # call raised NameError.  Resolve the handler from the node's class
        # name (e.g. a ``Const`` node dispatches to ``build_Const``).
        m = getattr(self, 'build_' + o.__class__.__name__, None)
        if m is None:
            raise UnknownType(o.__class__.__name__)
        return m(o)

    def build_List(self, o):
        # rebuild each child recursively
        return list(map(self.build, o.getChildren()))

    def build_Const(self, o):
        return o.value

    def build_Dict(self, o):
        # children alternate key, value, key, value, ...
        d = {}
        i = iter(map(self.build, o.getChildren()))
        for el in i:
            d[el] = next(i)
        return d

    def build_Tuple(self, o):
        return tuple(self.build_List(o))

    def build_Name(self, o):
        # only the three literal singletons are allowed as bare names
        if o.name == 'None':
            return None
        if o.name == 'True':
            return True
        if o.name == 'False':
            return False

        # An undefined Name
        raise UnknownType('Undefined Name')

    def build_Add(self, o):
        # only supported for complex literals such as ``1+2j``
        real, imag = list(map(self.build_Const, o.getChildren()))
        try:
            real = float(real)
        except TypeError:
            raise UnknownType('Add')
        if not isinstance(imag, complex) or imag.real != 0.0:
            raise UnknownType('Add')
        return real + imag

    def build_Getattr(self, o):
        parent = self.build(o.expr)
        return getattr(parent, o.attrname)

    def build_UnarySub(self, o):
        return -self.build_Const(o.getChildren()[0])

    def build_UnaryAdd(self, o):
        return self.build_Const(o.getChildren()[0])
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+_builder = Builder()
|
|
198
|
+
|
|
199
|
+
|
|
200
|
def unrepr(s):
    """Turn the repr of a basic Python value back into the value.

    Falsy input (e.g. the empty string) is returned unchanged; anything
    else is parsed with ``ast.literal_eval``.
    """
    if s:
        # literal_eval only accepts literal syntax, so this is safe
        # (ast is imported lazily to keep module import cheap)
        import ast
        return ast.literal_eval(s)
    return s
|
|
207
|
+
|
|
208
|
+
|
|
209
|
class ConfigObjError(SyntaxError):
    """
    Base class for all errors that ConfigObj raises.

    Subclasses ``SyntaxError`` and carries the offending line and its
    number so callers can report parse context.
    """

    def __init__(self, message='', line_number=None, line=''):
        # remember where in the input the problem occurred
        self.line, self.line_number = line, line_number
        SyntaxError.__init__(self, message)
|
|
218
|
+
|
|
219
|
+
|
|
220
|
class NestingError(ConfigObjError):
    """
    Raised when a section marker's nesting depth doesn't match
    the surrounding structure.
    """
|
|
224
|
+
|
|
225
|
+
|
|
226
|
class ParseError(ConfigObjError):
    """
    Raised for a badly written line: one that is neither a valid
    ``key = value`` line nor a valid section marker line.
    """
|
|
232
|
+
|
|
233
|
+
|
|
234
|
class ReloadError(IOError):
    """
    Raised when a ``reload`` operation fails because no filename is set.

    Subclass of ``IOError`` so existing I/O error handling catches it.
    """

    def __init__(self):
        message = 'reload failed, filename is not set.'
        IOError.__init__(self, message)
|
|
241
|
+
|
|
242
|
+
|
|
243
|
class DuplicateError(ConfigObjError):
    """
    Raised when the specified keyword or section already exists.
    """
|
|
247
|
+
|
|
248
|
+
|
|
249
|
class ConfigspecError(ConfigObjError):
    """
    An error occurred whilst parsing a configspec.
    """
|
|
253
|
+
|
|
254
|
+
|
|
255
|
class InterpolationError(ConfigObjError):
    """Base class for the interpolation errors (loop detection and missing options)."""
|
|
257
|
+
|
|
258
|
+
|
|
259
|
class InterpolationLoopError(InterpolationError):
    """Maximum interpolation depth exceeded: a value refers (indirectly) to itself."""

    def __init__(self, option):
        message = 'interpolation loop detected in value "%s".' % option
        InterpolationError.__init__(self, message)
|
|
266
|
+
|
|
267
|
+
|
|
268
|
class RepeatSectionError(ConfigObjError):
    """
    Raised for additional sections in a section that has a
    ``__many__`` (repeated) section.
    """
|
|
273
|
+
|
|
274
|
+
|
|
275
|
class MissingInterpolationOption(InterpolationError):
    """A value referenced during interpolation could not be found."""

    def __init__(self, option):
        InterpolationError.__init__(
            self, 'missing option "%s" in interpolation.' % option)
|
|
280
|
+
|
|
281
|
+
|
|
282
|
class UnreprError(ConfigObjError):
    """Raised for an error parsing a value in unrepr mode."""
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
|
|
287
|
class InterpolationEngine(object):
    """
    A helper class to help perform string interpolation.

    This class is an abstract base class; its descendants perform
    the actual work.  Subclasses override ``_cookie``/``_KEYCRE`` and
    implement ``_parse_match``.
    """

    # compiled regexp to use in self.interpolate()
    _KEYCRE = re.compile(r"%\(([^)]*)\)s")
    # the character that signals a possible interpolation in a value
    _cookie = '%'

    def __init__(self, section):
        # the Section instance that "owns" this engine
        self.section = section


    def interpolate(self, key, value):
        """Return ``value`` with all interpolation references expanded."""
        # short-cut: nothing to do if the cookie character never appears
        if not self._cookie in value:
            return value

        def recursive_interpolate(key, value, section, backtrail):
            """The function that does the actual work.

            ``value``: the string we're trying to interpolate.
            ``section``: the section in which that string was found
            ``backtrail``: a dict to keep track of where we've been,
            to detect and prevent infinite recursion loops

            This is similar to a depth-first-search algorithm.
            """
            # Have we been here already?
            if (key, section.name) in backtrail:
                # Yes - infinite loop detected
                raise InterpolationLoopError(key)
            # Place a marker on our backtrail so we won't come back here again
            backtrail[(key, section.name)] = 1

            # Now start the actual work
            match = self._KEYCRE.search(value)
            while match:
                # The actual parsing of the match is implementation-dependent,
                # so delegate to our helper function
                k, v, s = self._parse_match(match)
                if k is None:
                    # That's the signal that no further interpolation is needed
                    replacement = v
                else:
                    # Further interpolation may be needed to obtain final value
                    replacement = recursive_interpolate(k, v, s, backtrail)
                # Replace the matched string with its final value
                start, end = match.span()
                value = ''.join((value[:start], replacement, value[end:]))
                new_search_start = start + len(replacement)
                # Pick up the next interpolation key, if any, for next time
                # through the while loop
                match = self._KEYCRE.search(value, new_search_start)

            # Now safe to come back here again; remove marker from backtrail
            del backtrail[(key, section.name)]

            return value

        # Back in interpolate(), all we have to do is kick off the recursive
        # function with appropriate starting values
        value = recursive_interpolate(key, value, self.section, {})
        return value


    def _fetch(self, key):
        """Helper function to fetch values from owning section.

        Returns a 2-tuple: the value, and the section where it was found.
        Raises ``MissingInterpolationOption`` if the key is nowhere to be found.
        """
        # switch off interpolation before we try and fetch anything !
        save_interp = self.section.main.interpolation
        self.section.main.interpolation = False

        # Start at section that "owns" this InterpolationEngine
        current_section = self.section
        while True:
            # try the current section first
            val = current_section.get(key)
            if val is not None and not isinstance(val, Section):
                break
            # try "DEFAULT" next
            val = current_section.get('DEFAULT', {}).get(key)
            if val is not None and not isinstance(val, Section):
                break
            # move up to parent and try again
            # top-level's parent is itself
            if current_section.parent is current_section:
                # reached top level, time to give up
                break
            current_section = current_section.parent

        # restore interpolation to previous value before returning
        self.section.main.interpolation = save_interp
        if val is None:
            raise MissingInterpolationOption(key)
        return val, current_section


    def _parse_match(self, match):
        """Implementation-dependent helper function.

        Will be passed a match object corresponding to the interpolation
        key we just found (e.g., "%(foo)s" or "$foo"). Should look up that
        key in the appropriate config file section (using the ``_fetch()``
        helper function) and return a 3-tuple: (key, value, section)

        ``key`` is the name of the key we're looking for
        ``value`` is the value found for that key
        ``section`` is a reference to the section where it was found

        ``key`` and ``section`` should be None if no further
        interpolation should be performed on the resulting value
        (e.g., if we interpolated "$$" and returned "$").
        """
        raise NotImplementedError()
|
|
408
|
+
|
|
409
|
+
|
|
410
|
+
|
|
411
|
class ConfigParserInterpolation(InterpolationEngine):
    """Interpolation engine mimicking ``ConfigParser``'s ``%(name)s`` syntax."""
    _cookie = '%'
    _KEYCRE = re.compile(r"%\(([^)]*)\)s")

    def _parse_match(self, match):
        # the single capture group is the option name; resolve it via _fetch
        name = match.group(1)
        found_value, found_section = self._fetch(name)
        return name, found_value, found_section
|
|
420
|
+
|
|
421
|
+
|
|
422
|
+
|
|
423
|
class TemplateInterpolation(InterpolationEngine):
    """Behaves like string.Template: ``$name`` / ``${name}``, with ``$$`` as escape."""
    _cookie = '$'
    _delimiter = '$'
    _KEYCRE = re.compile(r"""
        \$(?:
            (?P<escaped>\$) | # Two $ signs
            (?P<named>[_a-z][_a-z0-9]*) | # $name format
            {(?P<braced>[^}]*)} # ${name} format
        )
        """, re.IGNORECASE | re.VERBOSE)

    def _parse_match(self, match):
        # Valid name (in or out of braces): fetch value from section
        key = match.group('named') or match.group('braced')
        if key is not None:
            value, section = self._fetch(key)
            return key, value, section
        # Escaped delimiter (e.g., $$): return single delimiter
        if match.group('escaped') is not None:
            # Return None for key and section to indicate it's time to stop
            return None, self._delimiter, None
        # Anything else: ignore completely, just return it unchanged
        return None, match.group(), None
|
|
447
|
+
|
|
448
|
+
|
|
449
|
+interpolation_engines = {
|
|
450
|
+ 'configparser': ConfigParserInterpolation,
|
|
451
|
+ 'template': TemplateInterpolation,
|
|
452
|
+}
|
|
453
|
+
|
|
454
|
+
|
|
455
|
def __newobj__(cls, *args):
    # Hack for pickle: recreate an instance via __new__ without running
    # __init__, so unpickling bypasses Section's custom __setitem__
    # machinery until state is restored (see Section.__reduce__).
    return cls.__new__(cls, *args)
|
|
458
|
+
|
|
459
|
+class Section(dict):
|
|
460
|
+ """
|
|
461
|
+ A dictionary-like object that represents a section in a config file.
|
|
462
|
+
|
|
463
|
+ It does string interpolation if the 'interpolation' attribute
|
|
464
|
+ of the 'main' object is set to True.
|
|
465
|
+
|
|
466
|
+ Interpolation is tried first from this object, then from the 'DEFAULT'
|
|
467
|
+ section of this object, next from the parent and its 'DEFAULT' section,
|
|
468
|
+ and so on until the main object is reached.
|
|
469
|
+
|
|
470
|
+ A Section will behave like an ordered dictionary - following the
|
|
471
|
+ order of the ``scalars`` and ``sections`` attributes.
|
|
472
|
+ You can use this to change the order of members.
|
|
473
|
+
|
|
474
|
+ Iteration follows the order: scalars, then sections.
|
|
475
|
+ """
|
|
476
|
+
|
|
477
|
+
|
|
478
|
+ def __setstate__(self, state):
|
|
479
|
+ dict.update(self, state[0])
|
|
480
|
+ self.__dict__.update(state[1])
|
|
481
|
+
|
|
482
|
+ def __reduce__(self):
|
|
483
|
+ state = (dict(self), self.__dict__)
|
|
484
|
+ return (__newobj__, (self.__class__,), state)
|
|
485
|
+
|
|
486
|
+
|
|
487
|
    def __init__(self, parent, depth, main, indict=None, name=None):
        """
        * parent is the section above
        * depth is the depth level of this section
        * main is the main ConfigObj
        * indict is a dictionary to initialise the section with
        * name is informational only: the section's key in its parent
        """
        if indict is None:
            indict = {}
        dict.__init__(self)
        # used for nesting level *and* interpolation
        self.parent = parent
        # used for the interpolation attribute
        self.main = main
        # level of nesting depth of this Section
        self.depth = depth
        # purely for information
        self.name = name
        #
        self._initialise()
        # we do this explicitly so that __setitem__ is used properly
        # (rather than just passing to ``dict.__init__``)
        for entry, value in indict.items():
            self[entry] = value
|
|
511
|
+
|
|
512
|
+
|
|
513
|
+ def _initialise(self):
|
|
514
|
+ # the sequence of scalar values in this Section
|
|
515
|
+ self.scalars = []
|
|
516
|
+ # the sequence of sections in this Section
|
|
517
|
+ self.sections = []
|
|
518
|
+ # for comments :-)
|
|
519
|
+ self.comments = {}
|
|
520
|
+ self.inline_comments = {}
|
|
521
|
+ # the configspec
|
|
522
|
+ self.configspec = None
|
|
523
|
+ # for defaults
|
|
524
|
+ self.defaults = []
|
|
525
|
+ self.default_values = {}
|
|
526
|
+ self.extra_values = []
|
|
527
|
+ self._created = False
|
|
528
|
+
|
|
529
|
+
|
|
530
|
    def _interpolate(self, key, value):
        """Lazily create (and cache) the interpolation engine, then delegate to it."""
        try:
            # do we already have an interpolation engine?
            engine = self._interpolation_engine
        except AttributeError:
            # not yet: first time running _interpolate(), so pick the engine
            name = self.main.interpolation
            if name == True:  # note that "if name:" would be incorrect here
                # backwards-compatibility: interpolation=True means use default
                name = DEFAULT_INTERPOLATION
            name = name.lower()  # so that "Template", "template", etc. all work
            class_ = interpolation_engines.get(name, None)
            if class_ is None:
                # invalid value for self.main.interpolation: disable it
                self.main.interpolation = False
                return value
            else:
                # save reference to engine so we don't have to do this again
                engine = self._interpolation_engine = class_(self)
        # let the engine do the actual work
        return engine.interpolate(key, value)
|
|
551
|
+
|
|
552
|
+
|
|
553
|
    def __getitem__(self, key):
        """Fetch the item and do string interpolation."""
        val = dict.__getitem__(self, key)
        if self.main.interpolation:
            if isinstance(val, six.string_types):
                return self._interpolate(key, val)
            if isinstance(val, list):
                def _check(entry):
                    # interpolate string members; leave everything else alone
                    if isinstance(entry, six.string_types):
                        return self._interpolate(key, entry)
                    return entry
                new = [_check(entry) for entry in val]
                if new != val:
                    # only return the copy when interpolation changed something
                    return new
        return val
|
|
568
|
+
|
|
569
|
+
|
|
570
|
    def __setitem__(self, key, value, unrepr=False):
        """
        Correctly set a value.

        Making dictionary values Section instances.
        (We have to special case 'Section' instances - which are also dicts)

        Keys must be strings.
        Values need only be strings (or lists of strings) if
        ``main.stringify`` is set.

        ``unrepr`` must be set when setting a value to a dictionary, without
        creating a new sub-section.
        """
        if not isinstance(key, six.string_types):
            raise ValueError('The key "%s" is not a string.' % key)

        # add the comment
        if key not in self.comments:
            self.comments[key] = []
            self.inline_comments[key] = ''
        # remove the entry from defaults
        if key in self.defaults:
            self.defaults.remove(key)
        #
        if isinstance(value, Section):
            # already a Section: store it directly
            if key not in self:
                self.sections.append(key)
            dict.__setitem__(self, key, value)
        elif isinstance(value, dict) and not unrepr:
            # First create the new depth level,
            # then create the section
            if key not in self:
                self.sections.append(key)
            new_depth = self.depth + 1
            dict.__setitem__(
                self,
                key,
                Section(
                    self,
                    new_depth,
                    self.main,
                    indict=value,
                    name=key))
        else:
            # a scalar value
            if key not in self:
                self.scalars.append(key)
            if not self.main.stringify:
                # without stringify, only strings (or lists of strings) allowed
                if isinstance(value, six.string_types):
                    pass
                elif isinstance(value, (list, tuple)):
                    for entry in value:
                        if not isinstance(entry, six.string_types):
                            raise TypeError('Value is not a string "%s".' % entry)
                else:
                    raise TypeError('Value is not a string "%s".' % value)
            dict.__setitem__(self, key, value)
|
|
627
|
+
|
|
628
|
+
|
|
629
|
+ def __delitem__(self, key):
|
|
630
|
+ """Remove items from the sequence when deleting."""
|
|
631
|
+ dict. __delitem__(self, key)
|
|
632
|
+ if key in self.scalars:
|
|
633
|
+ self.scalars.remove(key)
|
|
634
|
+ else:
|
|
635
|
+ self.sections.remove(key)
|
|
636
|
+ del self.comments[key]
|
|
637
|
+ del self.inline_comments[key]
|
|
638
|
+
|
|
639
|
+
|
|
640
|
+ def get(self, key, default=None):
|
|
641
|
+ """A version of ``get`` that doesn't bypass string interpolation."""
|
|
642
|
+ try:
|
|
643
|
+ return self[key]
|
|
644
|
+ except KeyError:
|
|
645
|
+ return default
|
|
646
|
+
|
|
647
|
+
|
|
648
|
+ def update(self, indict):
|
|
649
|
+ """
|
|
650
|
+ A version of update that uses our ``__setitem__``.
|
|
651
|
+ """
|
|
652
|
+ for entry in indict:
|
|
653
|
+ self[entry] = indict[entry]
|
|
654
|
+
|
|
655
|
+
|
|
656
|
+ def pop(self, key, default=MISSING):
|
|
657
|
+ """
|
|
658
|
+ 'D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
|
|
659
|
+ If key is not found, d is returned if given, otherwise KeyError is raised'
|
|
660
|
+ """
|
|
661
|
+ try:
|
|
662
|
+ val = self[key]
|
|
663
|
+ except KeyError:
|
|
664
|
+ if default is MISSING:
|
|
665
|
+ raise
|
|
666
|
+ val = default
|
|
667
|
+ else:
|
|
668
|
+ del self[key]
|
|
669
|
+ return val
|
|
670
|
+
|
|
671
|
+
|
|
672
|
+ def popitem(self):
|
|
673
|
+ """Pops the first (key,val)"""
|
|
674
|
+ sequence = (self.scalars + self.sections)
|
|
675
|
+ if not sequence:
|
|
676
|
+ raise KeyError(": 'popitem(): dictionary is empty'")
|
|
677
|
+ key = sequence[0]
|
|
678
|
+ val = self[key]
|
|
679
|
+ del self[key]
|
|
680
|
+ return key, val
|
|
681
|
+
|
|
682
|
+
|
|
683
|
+ def clear(self):
|
|
684
|
+ """
|
|
685
|
+ A version of clear that also affects scalars/sections
|
|
686
|
+ Also clears comments and configspec.
|
|
687
|
+
|
|
688
|
+ Leaves other attributes alone :
|
|
689
|
+ depth/main/parent are not affected
|
|
690
|
+ """
|
|
691
|
+ dict.clear(self)
|
|
692
|
+ self.scalars = []
|
|
693
|
+ self.sections = []
|
|
694
|
+ self.comments = {}
|
|
695
|
+ self.inline_comments = {}
|
|
696
|
+ self.configspec = None
|
|
697
|
+ self.defaults = []
|
|
698
|
+ self.extra_values = []
|
|
699
|
+
|
|
700
|
+
|
|
701
|
+ def setdefault(self, key, default=None):
|
|
702
|
+ """A version of setdefault that sets sequence if appropriate."""
|
|
703
|
+ try:
|
|
704
|
+ return self[key]
|
|
705
|
+ except KeyError:
|
|
706
|
+ self[key] = default
|
|
707
|
+ return self[key]
|
|
708
|
+
|
|
709
|
+
|
|
710
|
+ def items(self):
|
|
711
|
+ """D.items() -> list of D's (key, value) pairs, as 2-tuples"""
|
|
712
|
+ return list(zip((self.scalars + self.sections), list(self.values())))
|
|
713
|
+
|
|
714
|
+
|
|
715
|
+ def keys(self):
|
|
716
|
+ """D.keys() -> list of D's keys"""
|
|
717
|
+ return (self.scalars + self.sections)
|
|
718
|
+
|
|
719
|
+
|
|
720
|
+ def values(self):
|
|
721
|
+ """D.values() -> list of D's values"""
|
|
722
|
+ return [self[key] for key in (self.scalars + self.sections)]
|
|
723
|
+
|
|
724
|
+
|
|
725
|
+ def iteritems(self):
|
|
726
|
+ """D.iteritems() -> an iterator over the (key, value) items of D"""
|
|
727
|
+ return iter(list(self.items()))
|
|
728
|
+
|
|
729
|
+
|
|
730
|
+ def iterkeys(self):
|
|
731
|
+ """D.iterkeys() -> an iterator over the keys of D"""
|
|
732
|
+ return iter((self.scalars + self.sections))
|
|
733
|
+
|
|
734
|
+ __iter__ = iterkeys
|
|
735
|
+
|
|
736
|
+
|
|
737
|
+ def itervalues(self):
|
|
738
|
+ """D.itervalues() -> an iterator over the values of D"""
|
|
739
|
+ return iter(list(self.values()))
|
|
740
|
+
|
|
741
|
+
|
|
742
|
    def __repr__(self):
        """x.__repr__() <==> repr(x)"""
        def _getval(key):
            # fall back to the raw stored value if interpolation fails
            try:
                return self[key]
            except MissingInterpolationOption:
                return dict.__getitem__(self, key)
        return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
            for key in (self.scalars + self.sections)])

    __str__ = __repr__
    __str__.__doc__ = "x.__str__() <==> str(x)"
|
|
754
|
+
|
|
755
|
+
|
|
756
|
+ # Extra methods - not in a normal dictionary
|
|
757
|
+
|
|
758
|
+ def dict(self):
|
|
759
|
+ """
|
|
760
|
+ Return a deepcopy of self as a dictionary.
|
|
761
|
+
|
|
762
|
+ All members that are ``Section`` instances are recursively turned to
|
|
763
|
+ ordinary dictionaries - by calling their ``dict`` method.
|
|
764
|
+
|
|
765
|
+ >>> n = a.dict()
|
|
766
|
+ >>> n == a
|
|
767
|
+ 1
|
|
768
|
+ >>> n is a
|
|
769
|
+ 0
|
|
770
|
+ """
|
|
771
|
+ newdict = {}
|
|
772
|
+ for entry in self:
|
|
773
|
+ this_entry = self[entry]
|
|
774
|
+ if isinstance(this_entry, Section):
|
|
775
|
+ this_entry = this_entry.dict()
|
|
776
|
+ elif isinstance(this_entry, list):
|
|
777
|
+ # create a copy rather than a reference
|
|
778
|
+ this_entry = list(this_entry)
|
|
779
|
+ elif isinstance(this_entry, tuple):
|
|
780
|
+ # create a copy rather than a reference
|
|
781
|
+ this_entry = tuple(this_entry)
|
|
782
|
+ newdict[entry] = this_entry
|
|
783
|
+ return newdict
|
|
784
|
+
|
|
785
|
+
|
|
786
|
    def merge(self, indict):
        """
        A recursive update - useful for merging config files.

        >>> a = '''[section1]
        ... option1 = True
        ... [[subsection]]
        ... more_options = False
        ... # end of file'''.splitlines()
        >>> b = '''# File is user.ini
        ... [section1]
        ... option1 = False
        ... # end of file'''.splitlines()
        >>> c1 = ConfigObj(b)
        >>> c2 = ConfigObj(a)
        >>> c2.merge(c1)
        >>> c2
        ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
        """
        for key, val in list(indict.items()):
            # descend into matching sub-dicts; otherwise overwrite
            if (key in self and isinstance(self[key], dict) and
                    isinstance(val, dict)):
                self[key].merge(val)
            else:
                self[key] = val
|
|
811
|
+
|
|
812
|
+
|
|
813
|
+ def rename(self, oldkey, newkey):
|
|
814
|
+ """
|
|
815
|
+ Change a keyname to another, without changing position in sequence.
|
|
816
|
+
|
|
817
|
+ Implemented so that transformations can be made on keys,
|
|
818
|
+ as well as on values. (used by encode and decode)
|
|
819
|
+
|
|
820
|
+ Also renames comments.
|
|
821
|
+ """
|
|
822
|
+ if oldkey in self.scalars:
|
|
823
|
+ the_list = self.scalars
|
|
824
|
+ elif oldkey in self.sections:
|
|
825
|
+ the_list = self.sections
|
|
826
|
+ else:
|
|
827
|
+ raise KeyError('Key "%s" not found.' % oldkey)
|
|
828
|
+ pos = the_list.index(oldkey)
|
|
829
|
+ #
|
|
830
|
+ val = self[oldkey]
|
|
831
|
+ dict.__delitem__(self, oldkey)
|
|
832
|
+ dict.__setitem__(self, newkey, val)
|
|
833
|
+ the_list.remove(oldkey)
|
|
834
|
+ the_list.insert(pos, newkey)
|
|
835
|
+ comm = self.comments[oldkey]
|
|
836
|
+ inline_comment = self.inline_comments[oldkey]
|
|
837
|
+ del self.comments[oldkey]
|
|
838
|
+ del self.inline_comments[oldkey]
|
|
839
|
+ self.comments[newkey] = comm
|
|
840
|
+ self.inline_comments[newkey] = inline_comment
|
|
841
|
+
|
|
842
|
+
|
|
843
|
    def walk(self, function, raise_errors=True,
            call_on_sections=False, **keywargs):
        """
        Walk every member and call a function on the keyword and value.

        Return a dictionary of the return values

        If the function raises an exception, raise the error
        unless ``raise_errors=False``, in which case set the return value to
        ``False``.

        Any unrecognised keyword arguments you pass to walk, will be passed on
        to the function you pass in.

        Note: if ``call_on_sections`` is ``True`` then - on encountering a
        subsection, *first* the function is called for the *whole* subsection,
        and then recurses into its members. This means your function must be
        able to handle strings, dictionaries and lists. This allows you
        to change the key of subsections as well as for ordinary members. The
        return value when called on the whole subsection has to be discarded.

        See the encode and decode methods for examples, including functions.

        .. admonition:: caution

            You can use ``walk`` to transform the names of members of a section
            but you mustn't add or delete members.

        >>> config = '''[XXXXsection]
        ... XXXXkey = XXXXvalue'''.splitlines()
        >>> cfg = ConfigObj(config)
        >>> cfg
        ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}})
        >>> def transform(section, key):
        ...     val = section[key]
        ...     newkey = key.replace('XXXX', 'CLIENT1')
        ...     section.rename(key, newkey)
        ...     if isinstance(val, (tuple, list, dict)):
        ...         pass
        ...     else:
        ...         val = val.replace('XXXX', 'CLIENT1')
        ...         section[newkey] = val
        >>> cfg.walk(transform, call_on_sections=True)
        {'CLIENT1section': {'CLIENT1key': None}}
        >>> cfg
        ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}})
        """
        out = {}
        # scalars first
        for i in range(len(self.scalars)):
            # index (not name) iteration: the function may rename the key
            entry = self.scalars[i]
            try:
                val = function(self, entry, **keywargs)
                # bound again in case name has changed
                entry = self.scalars[i]
                out[entry] = val
            except Exception:
                if raise_errors:
                    raise
                else:
                    entry = self.scalars[i]
                    out[entry] = False
        # then sections
        for i in range(len(self.sections)):
            entry = self.sections[i]
            if call_on_sections:
                try:
                    function(self, entry, **keywargs)
                except Exception:
                    if raise_errors:
                        raise
                    else:
                        entry = self.sections[i]
                        out[entry] = False
                # bound again in case name has changed
                entry = self.sections[i]
            # previous result is discarded
            out[entry] = self[entry].walk(
                function,
                raise_errors=raise_errors,
                call_on_sections=call_on_sections,
                **keywargs)
        return out
|
|
926
|
+
|
|
927
|
+
|
|
928
|
+ def as_bool(self, key):
|
|
929
|
+ """
|
|
930
|
+ Accepts a key as input. The corresponding value must be a string or
|
|
931
|
+ the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to
|
|
932
|
+ retain compatibility with Python 2.2.
|
|
933
|
+
|
|
934
|
+ If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns
|
|
935
|
+ ``True``.
|
|
936
|
+
|
|
937
|
+ If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns
|
|
938
|
+ ``False``.
|
|
939
|
+
|
|
940
|
+ ``as_bool`` is not case sensitive.
|
|
941
|
+
|
|
942
|
+ Any other input will raise a ``ValueError``.
|
|
943
|
+
|
|
944
|
+ >>> a = ConfigObj()
|
|
945
|
+ >>> a['a'] = 'fish'
|
|
946
|
+ >>> a.as_bool('a')
|
|
947
|
+ Traceback (most recent call last):
|
|
948
|
+ ValueError: Value "fish" is neither True nor False
|
|
949
|
+ >>> a['b'] = 'True'
|
|
950
|
+ >>> a.as_bool('b')
|
|
951
|
+ 1
|
|
952
|
+ >>> a['b'] = 'off'
|
|
953
|
+ >>> a.as_bool('b')
|
|
954
|
+ 0
|
|
955
|
+ """
|
|
956
|
+ val = self[key]
|
|
957
|
+ if val == True:
|
|
958
|
+ return True
|
|
959
|
+ elif val == False:
|
|
960
|
+ return False
|
|
961
|
+ else:
|
|
962
|
+ try:
|
|
963
|
+ if not isinstance(val, six.string_types):
|
|
964
|
+ # TODO: Why do we raise a KeyError here?
|
|
965
|
+ raise KeyError()
|
|
966
|
+ else:
|
|
967
|
+ return self.main._bools[val.lower()]
|
|
968
|
+ except KeyError:
|
|
969
|
+ raise ValueError('Value "%s" is neither True nor False' % val)
|
|
970
|
+
|
|
971
|
+
|
|
972
|
+ def as_int(self, key):
|
|
973
|
+ """
|
|
974
|
+ A convenience method which coerces the specified value to an integer.
|
|
975
|
+
|
|
976
|
+ If the value is an invalid literal for ``int``, a ``ValueError`` will
|
|
977
|
+ be raised.
|
|
978
|
+
|
|
979
|
+ >>> a = ConfigObj()
|
|
980
|
+ >>> a['a'] = 'fish'
|
|
981
|
+ >>> a.as_int('a')
|
|
982
|
+ Traceback (most recent call last):
|
|
983
|
+ ValueError: invalid literal for int() with base 10: 'fish'
|
|
984
|
+ >>> a['b'] = '1'
|
|
985
|
+ >>> a.as_int('b')
|
|
986
|
+ 1
|
|
987
|
+ >>> a['b'] = '3.2'
|
|
988
|
+ >>> a.as_int('b')
|
|
989
|
+ Traceback (most recent call last):
|
|
990
|
+ ValueError: invalid literal for int() with base 10: '3.2'
|
|
991
|
+ """
|
|
992
|
+ return int(self[key])
|
|
993
|
+
|
|
994
|
+
|
|
995
|
+ def as_float(self, key):
|
|
996
|
+ """
|
|
997
|
+ A convenience method which coerces the specified value to a float.
|
|
998
|
+
|
|
999
|
+ If the value is an invalid literal for ``float``, a ``ValueError`` will
|
|
1000
|
+ be raised.
|
|
1001
|
+
|
|
1002
|
+ >>> a = ConfigObj()
|
|
1003
|
+ >>> a['a'] = 'fish'
|
|
1004
|
+ >>> a.as_float('a') #doctest: +IGNORE_EXCEPTION_DETAIL
|
|
1005
|
+ Traceback (most recent call last):
|
|
1006
|
+ ValueError: invalid literal for float(): fish
|
|
1007
|
+ >>> a['b'] = '1'
|
|
1008
|
+ >>> a.as_float('b')
|
|
1009
|
+ 1.0
|
|
1010
|
+ >>> a['b'] = '3.2'
|
|
1011
|
+ >>> a.as_float('b') #doctest: +ELLIPSIS
|
|
1012
|
+ 3.2...
|
|
1013
|
+ """
|
|
1014
|
+ return float(self[key])
|
|
1015
|
+
|
|
1016
|
+
|
|
1017
|
+ def as_list(self, key):
|
|
1018
|
+ """
|
|
1019
|
+ A convenience method which fetches the specified value, guaranteeing
|
|
1020
|
+ that it is a list.
|
|
1021
|
+
|
|
1022
|
+ >>> a = ConfigObj()
|
|
1023
|
+ >>> a['a'] = 1
|
|
1024
|
+ >>> a.as_list('a')
|
|
1025
|
+ [1]
|
|
1026
|
+ >>> a['a'] = (1,)
|
|
1027
|
+ >>> a.as_list('a')
|
|
1028
|
+ [1]
|
|
1029
|
+ >>> a['a'] = [1]
|
|
1030
|
+ >>> a.as_list('a')
|
|
1031
|
+ [1]
|
|
1032
|
+ """
|
|
1033
|
+ result = self[key]
|
|
1034
|
+ if isinstance(result, (tuple, list)):
|
|
1035
|
+ return list(result)
|
|
1036
|
+ return [result]
|
|
1037
|
+
|
|
1038
|
+
|
|
1039
|
+ def restore_default(self, key):
|
|
1040
|
+ """
|
|
1041
|
+ Restore (and return) default value for the specified key.
|
|
1042
|
+
|
|
1043
|
+ This method will only work for a ConfigObj that was created
|
|
1044
|
+ with a configspec and has been validated.
|
|
1045
|
+
|
|
1046
|
+ If there is no default value for this key, ``KeyError`` is raised.
|
|
1047
|
+ """
|
|
1048
|
+ default = self.default_values[key]
|
|
1049
|
+ dict.__setitem__(self, key, default)
|
|
1050
|
+ if key not in self.defaults:
|
|
1051
|
+ self.defaults.append(key)
|
|
1052
|
+ return default
|
|
1053
|
+
|
|
1054
|
+
|
|
1055
|
+ def restore_defaults(self):
|
|
1056
|
+ """
|
|
1057
|
+ Recursively restore default values to all members
|
|
1058
|
+ that have them.
|
|
1059
|
+
|
|
1060
|
+ This method will only work for a ConfigObj that was created
|
|
1061
|
+ with a configspec and has been validated.
|
|
1062
|
+
|
|
1063
|
+ It doesn't delete or modify entries without default values.
|
|
1064
|
+ """
|
|
1065
|
+ for key in self.default_values:
|
|
1066
|
+ self.restore_default(key)
|
|
1067
|
+
|
|
1068
|
+ for section in self.sections:
|
|
1069
|
+ self[section].restore_defaults()
|
|
1070
|
+
|
|
1071
|
+
|
|
1072
|
class ConfigObj(Section):
    """An object to read, create, and write config files."""

    # Matches one ``key = value`` line; the keyword may be single- or
    # double-quoted, and group 3 still carries list items / inline comments
    # (split out later by ``_handle_value``).
    _keyword = re.compile(r'''^ # line start
        (\s*)                   # indentation
        (                       # keyword
            (?:".*?")|          # double quotes
            (?:'.*?')|          # single quotes
            (?:[^'"=].*?)       # no quotes
        )
        \s*=\s*                 # divider
        (.*)                    # value (including list values and comments)
        $   # line end
        ''',
        re.VERBOSE)

    # Matches a ``[section]`` / ``[[subsection]]`` marker; nesting depth is
    # recovered from the count of brackets in groups 2 and 4.
    _sectionmarker = re.compile(r'''^
        (\s*)                     # 1: indentation
        ((?:\[\s*)+)              # 2: section marker open
        (                         # 3: section name open
            (?:"\s*\S.*?\s*")|    # at least one non-space with double quotes
            (?:'\s*\S.*?\s*')|    # at least one non-space with single quotes
            (?:[^'"\s].*?)        # at least one non-space unquoted
        )                         # section name close
        ((?:\s*\])+)              # 4: section marker close
        \s*(\#.*)?                # 5: optional comment
        $''',
        re.VERBOSE)

    # this regexp pulls list values out as a single string
    # or single values and comments
    # FIXME: this regex adds a '' to the end of comma terminated lists
    #   workaround in ``_handle_value``
    _valueexp = re.compile(r'''^
        (?:
            (?:
                (
                    (?:
                        (?:
                            (?:".*?")|              # double quotes
                            (?:'.*?')|              # single quotes
                            (?:[^'",\#][^,\#]*?)    # unquoted
                        )
                        \s*,\s*                     # comma
                    )*      # match all list items ending in a comma (if any)
                )
                (
                    (?:".*?")|                      # double quotes
                    (?:'.*?')|                      # single quotes
                    (?:[^'",\#\s][^,]*?)|           # unquoted
                    (?:(?<!,))                      # Empty value
                )?          # last item in a list - or string value
            )|
            (,)             # alternatively a single comma - empty list
        )
        \s*(\#.*)?          # optional comment
        $''',
        re.VERBOSE)

    # use findall to get the members of a list value
    _listvalueexp = re.compile(r'''
        (
            (?:".*?")|          # double quotes
            (?:'.*?')|          # single quotes
            (?:[^'",\#]?.*?)    # unquoted
        )
        \s*,\s*                 # comma
        ''',
        re.VERBOSE)

    # this regexp is used for the value
    # when lists are switched off
    _nolistvalue = re.compile(r'''^
        (
            (?:".*?")|          # double quotes
            (?:'.*?')|          # single quotes
            (?:[^'"\#].*?)|     # unquoted
            (?:)                # Empty value
        )
        \s*(\#.*)?              # optional comment
        $''',
        re.VERBOSE)

    # regexes for finding triple quoted values on one line
    _single_line_single = re.compile(r"^'''(.*?)'''\s*(#.*)?$")
    _single_line_double = re.compile(r'^"""(.*?)"""\s*(#.*)?$')
    _multi_line_single = re.compile(r"^(.*?)'''\s*(#.*)?$")
    _multi_line_double = re.compile(r'^(.*?)"""\s*(#.*)?$')

    # maps a triple-quote style to its (one-line, closing-line) matchers
    _triple_quote = {
        "'''": (_single_line_single, _multi_line_single),
        '"""': (_single_line_double, _multi_line_double),
        }

    # Used by the ``istrue`` Section method
    # (also consulted by ``Section.as_bool`` via ``self.main._bools``)
    _bools = {
        'yes': True, 'no': False,
        'on': True, 'off': False,
        '1': True, '0': False,
        'true': True, 'false': False,
        }
|
|
1173
|
+
|
|
1174
|
+
|
|
1175
|
    def __init__(self, infile=None, options=None, configspec=None, encoding=None,
                 interpolation=True, raise_errors=False, list_values=True,
                 create_empty=False, file_error=False, stringify=True,
                 indent_type=None, default_encoding=None, unrepr=False,
                 write_empty_values=False, _inspec=False):
        """
        Parse a config file or create a config file object.

        ``ConfigObj(infile=None, configspec=None, encoding=None,
        interpolation=True, raise_errors=False, list_values=True,
        create_empty=False, file_error=False, stringify=True,
        indent_type=None, default_encoding=None, unrepr=False,
        write_empty_values=False, _inspec=False)``

        ``infile`` may be a filename, a list of lines, a dict, or a file-like
        object (see ``_load``).  Passing an ``options`` dict is deprecated;
        use the individual keyword arguments instead.
        """
        self._inspec = _inspec
        # init the superclass
        Section.__init__(self, self, 0, self)

        infile = infile or []

        # snapshot of the keyword arguments, used both as the option set and
        # to detect explicit overrides of a deprecated ``options`` dict
        _options = {'configspec': configspec,
                    'encoding': encoding, 'interpolation': interpolation,
                    'raise_errors': raise_errors, 'list_values': list_values,
                    'create_empty': create_empty, 'file_error': file_error,
                    'stringify': stringify, 'indent_type': indent_type,
                    'default_encoding': default_encoding, 'unrepr': unrepr,
                    'write_empty_values': write_empty_values}

        if options is None:
            options = _options
        else:
            import warnings
            warnings.warn('Passing in an options dictionary to ConfigObj() is '
                          'deprecated. Use **options instead.',
                          DeprecationWarning, stacklevel=2)

            # TODO: check the values too.
            for entry in options:
                if entry not in OPTION_DEFAULTS:
                    raise TypeError('Unrecognised option "%s".' % entry)
            # fill gaps from OPTION_DEFAULTS; explicit keyword arguments
            # (values differing from the default) win over the options dict
            for entry, value in list(OPTION_DEFAULTS.items()):
                if entry not in options:
                    options[entry] = value
                keyword_value = _options[entry]
                if value != keyword_value:
                    options[entry] = keyword_value

        # XXXX this ignores an explicit list_values = True in combination
        # with _inspec. The user should *never* do that anyway, but still...
        if _inspec:
            options['list_values'] = False

        self._initialise(options)
        configspec = options['configspec']
        self._original_configspec = configspec
        self._load(infile, configspec)
|
|
1231
|
+
|
|
1232
|
+
|
|
1233
|
    def _load(self, infile, configspec):
        """
        Read and parse ``infile`` (filename, list/tuple of lines, dict,
        ConfigObj, or file-like object), populate ``self``, then attach the
        configspec.  Accumulated parse errors are raised as one
        ``ConfigObjError`` after the whole input has been scanned.
        """
        if isinstance(infile, six.string_types):
            self.filename = infile
            if os.path.isfile(infile):
                with open(infile, 'rb') as h:
                    content = h.readlines() or []
            elif self.file_error:
                # raise an error if the file doesn't exist
                raise IOError('Config file not found: "%s".' % self.filename)
            else:
                # file doesn't already exist
                if self.create_empty:
                    # this is a good test that the filename specified
                    # isn't impossible - like on a non-existent device
                    with open(infile, 'w') as h:
                        h.write('')
                content = []

        elif isinstance(infile, (list, tuple)):
            content = list(infile)

        elif isinstance(infile, dict):
            # initialise self
            # the Section class handles creating subsections
            if isinstance(infile, ConfigObj):
                # get a copy of our ConfigObj
                def set_section(in_section, this_section):
                    # recursively copy scalars and subsections
                    for entry in in_section.scalars:
                        this_section[entry] = in_section[entry]
                    for section in in_section.sections:
                        this_section[section] = {}
                        set_section(in_section[section], this_section[section])
                set_section(infile, self)

            else:
                for entry in infile:
                    self[entry] = infile[entry]
            # nothing was parsed, so there can be no parse errors to keep
            del self._errors

            if configspec is not None:
                self._handle_configspec(configspec)
            else:
                self.configspec = None
            # dict input never goes through the text-parsing path below
            return

        elif getattr(infile, 'read', MISSING) is not MISSING:
            # This supports file like objects
            content = infile.read() or []
            # needs splitting into lines - but needs doing *after* decoding
            # in case it's not an 8 bit encoding
        else:
            raise TypeError('infile must be a filename, file like object, or list of lines.')

        if content:
            # don't do it for the empty ConfigObj
            content = self._handle_bom(content)
            # infile is now *always* a list
            #
            # Set the newlines attribute (first line ending it finds)
            # and strip trailing '\n' or '\r' from lines
            for line in content:
                if (not line) or (line[-1] not in ('\r', '\n')):
                    continue
                for end in ('\r\n', '\n', '\r'):
                    if line.endswith(end):
                        self.newlines = end
                        break
                break

        assert all(isinstance(line, six.string_types) for line in content), repr(content)
        content = [line.rstrip('\r\n') for line in content]

        self._parse(content)
        # if we had any errors, now is the time to raise them
        if self._errors:
            info = "at line %s." % self._errors[0].line_number
            if len(self._errors) > 1:
                msg = "Parsing failed with several errors.\nFirst error %s" % info
                error = ConfigObjError(msg)
            else:
                error = self._errors[0]
            # set the errors attribute; it's a list of tuples:
            # (error_type, message, line_number)
            error.errors = self._errors
            # set the config attribute
            error.config = self
            raise error
        # delete private attributes
        del self._errors

        if configspec is None:
            self.configspec = None
        else:
            self._handle_configspec(configspec)
|
|
1327
|
+
|
|
1328
|
+
|
|
1329
|
    def _initialise(self, options=None):
        """
        Reset all ConfigObj state from an options dict (defaults to
        ``OPTION_DEFAULTS``); also clears the Section attributes.
        """
        if options is None:
            options = OPTION_DEFAULTS

        # initialise a few variables
        self.filename = None
        self._errors = []          # parse errors collected by _handle_error
        self.raise_errors = options['raise_errors']
        self.interpolation = options['interpolation']
        self.list_values = options['list_values']
        self.create_empty = options['create_empty']
        self.file_error = options['file_error']
        self.stringify = options['stringify']
        self.indent_type = options['indent_type']
        self.encoding = options['encoding']
        self.default_encoding = options['default_encoding']
        self.BOM = False           # set to True if a BOM is found on load
        self.newlines = None       # first line ending seen while loading
        self.write_empty_values = options['write_empty_values']
        self.unrepr = options['unrepr']

        self.initial_comment = []
        self.final_comment = []
        self.configspec = None

        # _inspec mode (parsing a configspec) never parses list values
        if self._inspec:
            self.list_values = False

        # Clear section attributes as well
        Section._initialise(self)
|
|
1359
|
+
|
|
1360
|
+
|
|
1361
|
+ def __repr__(self):
|
|
1362
|
+ def _getval(key):
|
|
1363
|
+ try:
|
|
1364
|
+ return self[key]
|
|
1365
|
+ except MissingInterpolationOption:
|
|
1366
|
+ return dict.__getitem__(self, key)
|
|
1367
|
+ return ('ConfigObj({%s})' %
|
|
1368
|
+ ', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
|
|
1369
|
+ for key in (self.scalars + self.sections)]))
|
|
1370
|
+
|
|
1371
|
+
|
|
1372
|
    def _handle_bom(self, infile):
        """
        Handle any BOM, and decode if necessary.

        If an encoding is specified, that *must* be used - but the BOM should
        still be removed (and the BOM attribute set).

        (If the encoding is wrongly specified, then a BOM for an alternative
        encoding won't be discovered or removed.)

        If an encoding is not specified, UTF8 or UTF16 BOM will be detected and
        removed. The BOM attribute will be set. UTF16 will be decoded to
        unicode.

        NOTE: This method must not be called with an empty ``infile``.

        Specifying the *wrong* encoding is likely to cause a
        ``UnicodeDecodeError``.

        ``infile`` must always be returned as a list of lines, but may be
        passed in as a single string.
        """

        if ((self.encoding is not None) and
            (self.encoding.lower() not in BOM_LIST)):
            # No need to check for a BOM
            # the encoding specified doesn't have one
            # just decode
            return self._decode(infile, self.encoding)

        # only the first line can carry a BOM
        if isinstance(infile, (list, tuple)):
            line = infile[0]
        else:
            line = infile

        if isinstance(line, six.text_type):
            # it's already decoded and there's no need to do anything
            # else, just use the _decode utility method to handle
            # listifying appropriately
            return self._decode(infile, self.encoding)

        if self.encoding is not None:
            # encoding explicitly supplied
            # And it could have an associated BOM
            # TODO: if encoding is just UTF16 - we ought to check for both
            # TODO: big endian and little endian versions.
            enc = BOM_LIST[self.encoding.lower()]
            if enc == 'utf_16':
                # For UTF16 we try big endian and little endian
                for BOM, (encoding, final_encoding) in list(BOMS.items()):
                    if not final_encoding:
                        # skip UTF8
                        continue
                    # NOTE(review): this tests ``infile``, not ``line`` - when
                    # ``infile`` is a list this raises AttributeError rather
                    # than matching the BOM of its first line; confirm intent.
                    if infile.startswith(BOM):
                        ### BOM discovered
                        ##self.BOM = True
                        # Don't need to remove BOM
                        return self._decode(infile, encoding)

                # If we get this far, will *probably* raise a DecodeError
                # As it doesn't appear to start with a BOM
                return self._decode(infile, self.encoding)

            # Must be UTF8
            # NOTE(review): BOM_SET is defined elsewhere in this module;
            # presumably it maps codec name -> BOM bytes.
            BOM = BOM_SET[enc]
            if not line.startswith(BOM):
                return self._decode(infile, self.encoding)

            newline = line[len(BOM):]

            # BOM removed
            if isinstance(infile, (list, tuple)):
                infile[0] = newline
            else:
                infile = newline
            self.BOM = True
            return self._decode(infile, self.encoding)

        # No encoding specified - so we need to check for UTF8/UTF16
        for BOM, (encoding, final_encoding) in list(BOMS.items()):
            if not isinstance(line, six.binary_type) or not line.startswith(BOM):
                # didn't specify a BOM, or it's not a bytestring
                continue
            else:
                # BOM discovered
                self.encoding = final_encoding
                if not final_encoding:
                    self.BOM = True
                    # UTF8
                    # remove BOM
                    newline = line[len(BOM):]
                    if isinstance(infile, (list, tuple)):
                        infile[0] = newline
                    else:
                        infile = newline
                    # UTF-8
                    if isinstance(infile, six.text_type):
                        return infile.splitlines(True)
                    elif isinstance(infile, six.binary_type):
                        return infile.decode('utf-8').splitlines(True)
                    else:
                        return self._decode(infile, 'utf-8')
                # UTF16 - have to decode
                return self._decode(infile, encoding)

        if six.PY2 and isinstance(line, str):
            # don't actually do any decoding, since we're on python 2 and
            # returning a bytestring is fine
            return self._decode(infile, None)
        # No BOM discovered and no encoding specified, default to UTF-8
        if isinstance(infile, six.binary_type):
            return infile.decode('utf-8').splitlines(True)
        else:
            return self._decode(infile, 'utf-8')
|
|
1487
|
+
|
|
1488
|
+
|
|
1489
|
+ def _a_to_u(self, aString):
|
|
1490
|
+ """Decode ASCII strings to unicode if a self.encoding is specified."""
|
|
1491
|
+ if isinstance(aString, six.binary_type) and self.encoding:
|
|
1492
|
+ return aString.decode(self.encoding)
|
|
1493
|
+ else:
|
|
1494
|
+ return aString
|
|
1495
|
+
|
|
1496
|
+
|
|
1497
|
+ def _decode(self, infile, encoding):
|
|
1498
|
+ """
|
|
1499
|
+ Decode infile to unicode. Using the specified encoding.
|
|
1500
|
+
|
|
1501
|
+ if is a string, it also needs converting to a list.
|
|
1502
|
+ """
|
|
1503
|
+ if isinstance(infile, six.string_types):
|
|
1504
|
+ return infile.splitlines(True)
|
|
1505
|
+ if isinstance(infile, six.binary_type):
|
|
1506
|
+ # NOTE: Could raise a ``UnicodeDecodeError``
|
|
1507
|
+ if encoding:
|
|
1508
|
+ return infile.decode(encoding).splitlines(True)
|
|
1509
|
+ else:
|
|
1510
|
+ return infile.splitlines(True)
|
|
1511
|
+
|
|
1512
|
+ if encoding:
|
|
1513
|
+ for i, line in enumerate(infile):
|
|
1514
|
+ if isinstance(line, six.binary_type):
|
|
1515
|
+ # NOTE: The isinstance test here handles mixed lists of unicode/string
|
|
1516
|
+ # NOTE: But the decode will break on any non-string values
|
|
1517
|
+ # NOTE: Or could raise a ``UnicodeDecodeError``
|
|
1518
|
+ infile[i] = line.decode(encoding)
|
|
1519
|
+ return infile
|
|
1520
|
+
|
|
1521
|
+
|
|
1522
|
+ def _decode_element(self, line):
|
|
1523
|
+ """Decode element to unicode if necessary."""
|
|
1524
|
+ if isinstance(line, six.binary_type) and self.default_encoding:
|
|
1525
|
+ return line.decode(self.default_encoding)
|
|
1526
|
+ else:
|
|
1527
|
+ return line
|
|
1528
|
+
|
|
1529
|
+
|
|
1530
|
+ # TODO: this may need to be modified
|
|
1531
|
+ def _str(self, value):
|
|
1532
|
+ """
|
|
1533
|
+ Used by ``stringify`` within validate, to turn non-string values
|
|
1534
|
+ into strings.
|
|
1535
|
+ """
|
|
1536
|
+ if not isinstance(value, six.string_types):
|
|
1537
|
+ # intentially 'str' because it's just whatever the "normal"
|
|
1538
|
+ # string type is for the python version we're dealing with
|
|
1539
|
+ return str(value)
|
|
1540
|
+ else:
|
|
1541
|
+ return value
|
|
1542
|
+
|
|
1543
|
+
|
|
1544
|
    def _parse(self, infile):
        """
        Actually parse the config file.

        ``infile`` is a list of already-decoded, newline-stripped lines.
        Errors are routed through ``_handle_error`` (raised immediately or
        queued on ``self._errors`` depending on ``raise_errors``); comments
        are attached to the entry that follows them.
        """
        # list values are never parsed in unrepr mode
        temp_list_values = self.list_values
        if self.unrepr:
            self.list_values = False

        comment_list = []
        done_start = False
        this_section = self
        maxline = len(infile) - 1
        cur_index = -1
        reset_comment = False

        while cur_index < maxline:
            if reset_comment:
                comment_list = []
            cur_index += 1
            line = infile[cur_index]
            sline = line.strip()
            # do we have anything on the line ?
            if not sline or sline.startswith('#'):
                # blank line or comment: accumulate for the next entry
                reset_comment = False
                comment_list.append(line)
                continue

            if not done_start:
                # preserve initial comment
                self.initial_comment = comment_list
                comment_list = []
                done_start = True

            reset_comment = True
            # first we check if it's a section marker
            mat = self._sectionmarker.match(line)
            if mat is not None:
                # is a section line
                (indent, sect_open, sect_name, sect_close, comment) = mat.groups()
                if indent and (self.indent_type is None):
                    self.indent_type = indent
                # nesting depth == number of opening brackets
                cur_depth = sect_open.count('[')
                if cur_depth != sect_close.count(']'):
                    self._handle_error("Cannot compute the section depth",
                                       NestingError, infile, cur_index)
                    continue

                if cur_depth < this_section.depth:
                    # the new section is dropping back to a previous level
                    try:
                        parent = self._match_depth(this_section,
                                                   cur_depth).parent
                    except SyntaxError:
                        self._handle_error("Cannot compute nesting level",
                                           NestingError, infile, cur_index)
                        continue
                elif cur_depth == this_section.depth:
                    # the new section is a sibling of the current section
                    parent = this_section.parent
                elif cur_depth == this_section.depth + 1:
                    # the new section is a child the current section
                    parent = this_section
                else:
                    # depth may only increase one level at a time
                    self._handle_error("Section too nested",
                                       NestingError, infile, cur_index)
                    continue

                sect_name = self._unquote(sect_name)
                if sect_name in parent:
                    self._handle_error('Duplicate section name',
                                       DuplicateError, infile, cur_index)
                    continue

                # create the new section
                this_section = Section(
                    parent,
                    cur_depth,
                    self,
                    name=sect_name)
                parent[sect_name] = this_section
                parent.inline_comments[sect_name] = comment
                parent.comments[sect_name] = comment_list
                continue
            #
            # it's not a section marker,
            # so it should be a valid ``key = value`` line
            mat = self._keyword.match(line)
            if mat is None:
                self._handle_error(
                    'Invalid line ({0!r}) (matched as neither section nor keyword)'.format(line),
                    ParseError, infile, cur_index)
            else:
                # is a keyword value
                # value will include any inline comment
                (indent, key, value) = mat.groups()
                if indent and (self.indent_type is None):
                    self.indent_type = indent
                # check for a multiline value
                if value[:3] in ['"""', "'''"]:
                    try:
                        # _multiline consumes lines and advances cur_index
                        value, comment, cur_index = self._multiline(
                            value, infile, cur_index, maxline)
                    except SyntaxError:
                        self._handle_error(
                            'Parse error in multiline value',
                            ParseError, infile, cur_index)
                        continue
                    else:
                        if self.unrepr:
                            comment = ''
                            try:
                                value = unrepr(value)
                            except Exception as e:
                                if type(e) == UnknownType:
                                    msg = 'Unknown name or type in value'
                                else:
                                    msg = 'Parse error from unrepr-ing multiline value'
                                self._handle_error(msg, UnreprError, infile,
                                                   cur_index)
                                continue
                else:
                    if self.unrepr:
                        comment = ''
                        try:
                            value = unrepr(value)
                        except Exception as e:
                            if isinstance(e, UnknownType):
                                msg = 'Unknown name or type in value'
                            else:
                                msg = 'Parse error from unrepr-ing value'
                            self._handle_error(msg, UnreprError, infile,
                                               cur_index)
                            continue
                    else:
                        # extract comment and lists
                        try:
                            (value, comment) = self._handle_value(value)
                        except SyntaxError:
                            self._handle_error(
                                'Parse error in value',
                                ParseError, infile, cur_index)
                            continue
                #
                key = self._unquote(key)
                if key in this_section:
                    self._handle_error(
                        'Duplicate keyword name',
                        DuplicateError, infile, cur_index)
                    continue
                # add the key.
                # we set unrepr because if we have got this far we will never
                # be creating a new section
                this_section.__setitem__(key, value, unrepr=True)
                this_section.inline_comments[key] = comment
                this_section.comments[key] = comment_list
                continue
        #
        if self.indent_type is None:
            # no indentation used, set the type accordingly
            self.indent_type = ''

        # preserve the final comment
        if not self and not self.initial_comment:
            self.initial_comment = comment_list
        elif not reset_comment:
            self.final_comment = comment_list
        self.list_values = temp_list_values
|
|
1709
|
+
|
|
1710
|
+
|
|
1711
|
+ def _match_depth(self, sect, depth):
|
|
1712
|
+ """
|
|
1713
|
+ Given a section and a depth level, walk back through the sections
|
|
1714
|
+ parents to see if the depth level matches a previous section.
|
|
1715
|
+
|
|
1716
|
+ Return a reference to the right section,
|
|
1717
|
+ or raise a SyntaxError.
|
|
1718
|
+ """
|
|
1719
|
+ while depth < sect.depth:
|
|
1720
|
+ if sect is sect.parent:
|
|
1721
|
+ # we've reached the top level already
|
|
1722
|
+ raise SyntaxError()
|
|
1723
|
+ sect = sect.parent
|
|
1724
|
+ if sect.depth == depth:
|
|
1725
|
+ return sect
|
|
1726
|
+ # shouldn't get here
|
|
1727
|
+ raise SyntaxError()
|
|
1728
|
+
|
|
1729
|
+
|
|
1730
|
+ def _handle_error(self, text, ErrorClass, infile, cur_index):
|
|
1731
|
+ """
|
|
1732
|
+ Handle an error according to the error settings.
|
|
1733
|
+
|
|
1734
|
+ Either raise the error or store it.
|
|
1735
|
+ The error will have occured at ``cur_index``
|
|
1736
|
+ """
|
|
1737
|
+ line = infile[cur_index]
|
|
1738
|
+ cur_index += 1
|
|
1739
|
+ message = '{0} at line {1}.'.format(text, cur_index)
|
|
1740
|
+ error = ErrorClass(message, cur_index, line)
|
|
1741
|
+ if self.raise_errors:
|
|
1742
|
+ # raise the error - parsing stops here
|
|
1743
|
+ raise error
|
|
1744
|
+ # store the error
|
|
1745
|
+ # reraise when parsing has finished
|
|
1746
|
+ self._errors.append(error)
|
|
1747
|
+
|
|
1748
|
+
|
|
1749
|
+ def _unquote(self, value):
|
|
1750
|
+ """Return an unquoted version of a value"""
|
|
1751
|
+ if not value:
|
|
1752
|
+ # should only happen during parsing of lists
|
|
1753
|
+ raise SyntaxError
|
|
1754
|
+ if (value[0] == value[-1]) and (value[0] in ('"', "'")):
|
|
1755
|
+ value = value[1:-1]
|
|
1756
|
+ return value
|
|
1757
|
+
|
|
1758
|
+
|
|
1759
|
    def _quote(self, value, multiline=True):
        """
        Return a safely quoted version of a value.

        Raise a ConfigObjError if the value cannot be safely quoted.
        If multiline is ``True`` (default) then use triple quotes
        if necessary.

        * Don't quote values that don't need it.
        * Recursively quote members of a list and return a comma joined list.
        * Multiline is ``False`` for lists.
        * Obey list syntax for empty and single member lists.

        If ``list_values=False`` then the value is only quoted if it contains
        a ``\\n`` (is multiline) or '#'.

        If ``write_empty_values`` is set, and the value is an empty string, it
        won't be quoted.
        """
        if multiline and self.write_empty_values and value == '':
            # Only if multiline is set, so that it is used for values not
            # keys, and not values that are part of a list
            return ''

        if multiline and isinstance(value, (list, tuple)):
            # lists are rendered member-by-member; members must never be
            # triple-quoted, hence multiline=False in the recursive calls
            if not value:
                # empty list is written as a lone comma
                return ','
            elif len(value) == 1:
                # single-member list: trailing comma marks it as a list
                return self._quote(value[0], multiline=False) + ','
            return ', '.join([self._quote(val, multiline=False)
                for val in value])
        if not isinstance(value, six.string_types):
            if self.stringify:
                # intentially 'str' because it's just whatever the "normal"
                # string type is for the python version we're dealing with
                value = str(value)
            else:
                raise TypeError('Value "%s" is not a string.' % value)

        if not value:
            # empty string must be quoted so it round-trips
            return '""'

        # flags deciding which quoting template (module-level no/single/triple
        # quote templates) is appropriate for this value
        no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value
        need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value ))
        # NOTE(review): need_triple already covers the both-quotes case, so
        # hash_triple_quote appears to always be False here - kept as-is
        hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value)
        check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote

        if check_for_single:
            if not self.list_values:
                # we don't quote if ``list_values=False``
                quot = noquot
            # for normal values either single or double quotes will do
            elif '\n' in value:
                # will only happen if multiline is off - e.g. '\n' in key
                raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
            elif ((value[0] not in wspace_plus) and
                    (value[-1] not in wspace_plus) and
                    (',' not in value)):
                # no leading/trailing special chars and no comma: safe bare
                quot = noquot
            else:
                quot = self._get_single_quote(value)
        else:
            # if value has '\n' or "'" *and* '"', it will need triple quotes
            quot = self._get_triple_quote(value)

        if quot == noquot and '#' in value and self.list_values:
            # a bare '#' would start an inline comment - force quoting
            quot = self._get_single_quote(value)

        return quot % value
|
|
1830
|
+ def _get_single_quote(self, value):
|
|
1831
|
+ if ("'" in value) and ('"' in value):
|
|
1832
|
+ raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
|
|
1833
|
+ elif '"' in value:
|
|
1834
|
+ quot = squot
|
|
1835
|
+ else:
|
|
1836
|
+ quot = dquot
|
|
1837
|
+ return quot
|
|
1838
|
+
|
|
1839
|
+
|
|
1840
|
+ def _get_triple_quote(self, value):
|
|
1841
|
+ if (value.find('"""') != -1) and (value.find("'''") != -1):
|
|
1842
|
+ raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
|
|
1843
|
+ if value.find('"""') == -1:
|
|
1844
|
+ quot = tdquot
|
|
1845
|
+ else:
|
|
1846
|
+ quot = tsquot
|
|
1847
|
+ return quot
|
|
1848
|
+
|
|
1849
|
+
|
|
1850
|
    def _handle_value(self, value):
        """
        Given a value string, unquote, remove comment,
        handle lists. (including empty and single member lists)

        Returns a ``(value, comment)`` tuple where ``value`` is a string or
        a list of strings and ``comment`` is the trailing inline comment.
        Raises SyntaxError for badly quoted or badly constructed values.
        """
        if self._inspec:
            # Parsing a configspec so don't handle comments
            return (value, '')
        # do we look for lists in values ?
        if not self.list_values:
            mat = self._nolistvalue.match(value)
            if mat is None:
                raise SyntaxError()
            # NOTE: we don't unquote here
            return mat.groups()
        #
        mat = self._valueexp.match(value)
        if mat is None:
            # the value is badly constructed, probably badly quoted,
            # or an invalid list
            raise SyntaxError()
        # list_values: the comma-separated members before the last one;
        # single: the last (or only) member; empty_list: the lone comma
        (list_values, single, empty_list, comment) = mat.groups()
        if (list_values == '') and (single is None):
            # change this if you want to accept empty values
            raise SyntaxError()
        # NOTE: note there is no error handling from here if the regex
        # is wrong: then incorrect values will slip through
        if empty_list is not None:
            # the single comma - meaning an empty list
            return ([], comment)
        if single is not None:
            # handle empty values
            if list_values and not single:
                # FIXME: the '' is a workaround because our regex now matches
                # '' at the end of a list if it has a trailing comma
                single = None
            else:
                single = single or '""'
                single = self._unquote(single)
        if list_values == '':
            # not a list value
            return (single, comment)
        # a real list: unquote each member, then append the last member
        the_list = self._listvalueexp.findall(list_values)
        the_list = [self._unquote(val) for val in the_list]
        if single is not None:
            the_list += [single]
        return (the_list, comment)
|
|
1899
|
    def _multiline(self, value, infile, cur_index, maxline):
        """
        Extract the value, where we are in a multiline situation.

        ``value`` starts with a triple quote; consumes lines from ``infile``
        until the closing triple quote is found.  Returns
        ``(value, comment, cur_index)`` with ``cur_index`` pointing at the
        last consumed line.  Raises SyntaxError on malformed input or when
        the file ends before the quote is closed.
        """
        # the opening triple quote determines which regexes to use
        quot = value[:3]
        newvalue = value[3:]
        single_line = self._triple_quote[quot][0]
        multi_line = self._triple_quote[quot][1]
        mat = single_line.match(value)
        if mat is not None:
            # the whole triple-quoted value fits on this one line
            retval = list(mat.groups())
            retval.append(cur_index)
            return retval
        elif newvalue.find(quot) != -1:
            # somehow the triple quote is missing
            raise SyntaxError()
        #
        # accumulate full lines until one contains the closing quote
        while cur_index < maxline:
            cur_index += 1
            newvalue += '\n'
            line = infile[cur_index]
            if line.find(quot) == -1:
                newvalue += line
            else:
                # end of multiline, process it
                break
        else:
            # we've got to the end of the config, oops...
            raise SyntaxError()
        mat = multi_line.match(line)
        if mat is None:
            # a badly formed line
            raise SyntaxError()
        (value, comment) = mat.groups()
        return (newvalue + value, comment, cur_index)
|
|
1934
|
    def _handle_configspec(self, configspec):
        """
        Parse the configspec.

        Accepts anything the ConfigObj constructor accepts (filename, list
        of lines, ...) or an existing ConfigObj instance, and stores the
        resulting ConfigObj on ``self.configspec``.  Raises ConfigspecError
        when the configspec cannot be parsed and IOError when it cannot be
        read.
        """
        # FIXME: Should we check that the configspec was created with the
        # correct settings ? (i.e. ``list_values=False``)
        if not isinstance(configspec, ConfigObj):
            try:
                # _inspec=True switches off comment/list handling in values
                configspec = ConfigObj(configspec,
                                       raise_errors=True,
                                       file_error=True,
                                       _inspec=True)
            except ConfigObjError as e:
                # FIXME: Should these errors have a reference
                # to the already parsed ConfigObj ?
                raise ConfigspecError('Parsing configspec failed: %s' % e)
            except IOError as e:
                raise IOError('Reading configspec failed: %s' % e)

        self.configspec = configspec
|
|
1955
|
    def _set_configspec(self, section, copy):
        """
        Called by validate. Handles setting the configspec on subsections
        including sections to be validated by __many__

        Missing sections named in the configspec are created empty (and
        flagged via ``_created``); when ``copy`` is set their comments are
        copied over from the configspec as well.
        """
        configspec = section.configspec
        many = configspec.get('__many__')
        if isinstance(many, dict):
            # sections not explicitly named in the configspec are
            # validated against the '__many__' spec
            for entry in section.sections:
                if entry not in configspec:
                    section[entry].configspec = many

        for entry in configspec.sections:
            if entry == '__many__':
                continue
            if entry not in section:
                # create the missing section so validation can report on it
                section[entry] = {}
                section[entry]._created = True
                if copy:
                    # copy comments
                    section.comments[entry] = configspec.comments.get(entry, [])
                    section.inline_comments[entry] = configspec.inline_comments.get(entry, '')

            # Could be a scalar when we expect a section
            if isinstance(section[entry], Section):
                section[entry].configspec = configspec[entry]
|
|
1983
|
+ def _write_line(self, indent_string, entry, this_entry, comment):
|
|
1984
|
+ """Write an individual line, for the write method"""
|
|
1985
|
+ # NOTE: the calls to self._quote here handles non-StringType values.
|
|
1986
|
+ if not self.unrepr:
|
|
1987
|
+ val = self._decode_element(self._quote(this_entry))
|
|
1988
|
+ else:
|
|
1989
|
+ val = repr(this_entry)
|
|
1990
|
+ return '%s%s%s%s%s' % (indent_string,
|
|
1991
|
+ self._decode_element(self._quote(entry, multiline=False)),
|
|
1992
|
+ self._a_to_u(' = '),
|
|
1993
|
+ val,
|
|
1994
|
+ self._decode_element(comment))
|
|
1995
|
+
|
|
1996
|
+
|
|
1997
|
+ def _write_marker(self, indent_string, depth, entry, comment):
|
|
1998
|
+ """Write a section marker line"""
|
|
1999
|
+ return '%s%s%s%s%s' % (indent_string,
|
|
2000
|
+ self._a_to_u('[' * depth),
|
|
2001
|
+ self._quote(self._decode_element(entry), multiline=False),
|
|
2002
|
+ self._a_to_u(']' * depth),
|
|
2003
|
+ self._decode_element(comment))
|
|
2004
|
+
|
|
2005
|
+
|
|
2006
|
+ def _handle_comment(self, comment):
|
|
2007
|
+ """Deal with a comment."""
|
|
2008
|
+ if not comment:
|
|
2009
|
+ return ''
|
|
2010
|
+ start = self.indent_type
|
|
2011
|
+ if not comment.startswith('#'):
|
|
2012
|
+ start += self._a_to_u(' # ')
|
|
2013
|
+ return (start + comment)
|
|
2014
|
+
|
|
2015
|
+
|
|
2016
|
+ # Public methods
|
|
2017
|
+
|
|
2018
|
    def write(self, outfile=None, section=None):
        """
        Write the current ConfigObj as a file

        tekNico: FIXME: use StringIO instead of real files

        >>> filename = a.filename
        >>> a.filename = 'test.ini'
        >>> a.write()
        >>> a.filename = filename
        >>> a == ConfigObj('test.ini', raise_errors=True)
        1
        >>> import os
        >>> os.remove('test.ini')
        """
        if self.indent_type is None:
            # this can be true if initialised from a dictionary
            self.indent_type = DEFAULT_INDENT_TYPE

        out = []
        cs = self._a_to_u('#')
        csp = self._a_to_u('# ')
        if section is None:
            # top-level call: disable interpolation so raw values are written
            int_val = self.interpolation
            self.interpolation = False
            section = self
            for line in self.initial_comment:
                line = self._decode_element(line)
                stripped_line = line.strip()
                if stripped_line and not stripped_line.startswith(cs):
                    # prefix non-comment text with a comment marker
                    line = csp + line
                out.append(line)

        indent_string = self.indent_type * section.depth
        for entry in (section.scalars + section.sections):
            if entry in section.defaults:
                # don't write out default values
                continue
            for comment_line in section.comments[entry]:
                comment_line = self._decode_element(comment_line.lstrip())
                if comment_line and not comment_line.startswith(cs):
                    comment_line = csp + comment_line
                out.append(indent_string + comment_line)
            this_entry = section[entry]
            comment = self._handle_comment(section.inline_comments[entry])

            if isinstance(this_entry, Section):
                # a section
                out.append(self._write_marker(
                    indent_string,
                    this_entry.depth,
                    entry,
                    comment))
                # recurse: subsections return their lines instead of writing
                out.extend(self.write(section=this_entry))
            else:
                out.append(self._write_line(
                    indent_string,
                    entry,
                    this_entry,
                    comment))

        if section is self:
            for line in self.final_comment:
                line = self._decode_element(line)
                stripped_line = line.strip()
                if stripped_line and not stripped_line.startswith(cs):
                    line = csp + line
                out.append(line)
            # restore the interpolation setting disabled above
            self.interpolation = int_val

        if section is not self:
            # recursive call for a subsection: hand the lines back
            return out

        if (self.filename is None) and (outfile is None):
            # output a list of lines
            # might need to encode
            # NOTE: This will *screw* UTF16, each line will start with the BOM
            if self.encoding:
                out = [l.encode(self.encoding) for l in out]
            if (self.BOM and ((self.encoding is None) or
                (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))):
                # Add the UTF8 BOM
                if not out:
                    out.append('')
                out[0] = BOM_UTF8 + out[0]
            return out

        # Turn the list to a string, joined with correct newlines
        newline = self.newlines or os.linesep
        if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w'
            and sys.platform == 'win32' and newline == '\r\n'):
            # Windows specific hack to avoid writing '\r\r\n'
            newline = '\n'
        output = self._a_to_u(newline).join(out)
        if not output.endswith(newline):
            output += newline

        if isinstance(output, six.binary_type):
            output_bytes = output
        else:
            # fall back to ascii when no encoding was detected or configured
            output_bytes = output.encode(self.encoding or
                                         self.default_encoding or
                                         'ascii')

        if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
            # Add the UTF8 BOM
            output_bytes = BOM_UTF8 + output_bytes

        if outfile is not None:
            outfile.write(output_bytes)
        else:
            with open(self.filename, 'wb') as h:
                h.write(output_bytes)
|
|
2132
|
    def validate(self, validator, preserve_errors=False, copy=False,
                 section=None):
        """
        Test the ConfigObj against a configspec.

        It uses the ``validator`` object from *validate.py*.

        To run ``validate`` on the current ConfigObj, call: ::

            test = config.validate(validator)

        (Normally having previously passed in the configspec when the ConfigObj
        was created - you can dynamically assign a dictionary of checks to the
        ``configspec`` attribute of a section though).

        It returns ``True`` if everything passes, or a dictionary of
        pass/fails (True/False). If every member of a subsection passes, it
        will just have the value ``True``. (It also returns ``False`` if all
        members fail).

        In addition, it converts the values from strings to their native
        types if their checks pass (and ``stringify`` is set).

        If ``preserve_errors`` is ``True`` (``False`` is default) then instead
        of a marking a fail with a ``False``, it will preserve the actual
        exception object. This can contain info about the reason for failure.
        For example the ``VdtValueTooSmallError`` indicates that the value
        supplied was too small. If a value (or section) is missing it will
        still be marked as ``False``.

        You must have the validate module to use ``preserve_errors=True``.

        You can then use the ``flatten_errors`` function to turn your nested
        results dictionary into a flattened list of failures - useful for
        displaying meaningful error messages.
        """
        if section is None:
            # top-level call: sanity-check and prepare
            if self.configspec is None:
                raise ValueError('No configspec supplied.')
            if preserve_errors:
                # We do this once to remove a top level dependency on the validate module
                # Which makes importing configobj faster
                from validate import VdtMissingValue
                self._vdtMissingValue = VdtMissingValue

            section = self

            if copy:
                # copy document-level metadata over from the configspec
                section.initial_comment = section.configspec.initial_comment
                section.final_comment = section.configspec.final_comment
                section.encoding = section.configspec.encoding
                section.BOM = section.configspec.BOM
                section.newlines = section.configspec.newlines
                section.indent_type = section.configspec.indent_type

        #
        # section.default_values.clear() #??
        configspec = section.configspec
        self._set_configspec(section, copy)


        def validate_entry(entry, spec, val, missing, ret_true, ret_false):
            # check a single scalar; mutates ``out``/``section`` and returns
            # the updated all-passed / all-failed flags
            section.default_values.pop(entry, None)

            try:
                section.default_values[entry] = validator.get_default_value(configspec[entry])
            except (KeyError, AttributeError, validator.baseErrorClass):
                # No default, bad default or validator has no 'get_default_value'
                # (e.g. SimpleVal)
                pass

            try:
                check = validator.check(spec,
                                        val,
                                        missing=missing
                                        )
            except validator.baseErrorClass as e:
                if not preserve_errors or isinstance(e, self._vdtMissingValue):
                    out[entry] = False
                else:
                    # preserve the error
                    out[entry] = e
                    ret_false = False
                ret_true = False
            else:
                ret_false = False
                out[entry] = True
                if self.stringify or missing:
                    # if we are doing type conversion
                    # or the value is a supplied default
                    if not self.stringify:
                        if isinstance(check, (list, tuple)):
                            # preserve lists
                            check = [self._str(item) for item in check]
                        elif missing and check is None:
                            # convert the None from a default to a ''
                            check = ''
                        else:
                            check = self._str(check)
                    if (check != val) or missing:
                        section[entry] = check
                if not copy and missing and entry not in section.defaults:
                    section.defaults.append(entry)
            return ret_true, ret_false

        #
        out = {}
        ret_true = True
        ret_false = True

        # entries present in the config but not covered by the configspec,
        # and entries provided with the wrong kind (scalar vs section)
        unvalidated = [k for k in section.scalars if k not in configspec]
        incorrect_sections = [k for k in configspec.sections if k in section.scalars]
        incorrect_scalars = [k for k in configspec.scalars if k in section.sections]

        for entry in configspec.scalars:
            if entry in ('__many__', '___many___'):
                # reserved names
                continue
            if (not entry in section.scalars) or (entry in section.defaults):
                # missing entries
                # or entries from defaults
                missing = True
                val = None
                if copy and entry not in section.scalars:
                    # copy comments
                    section.comments[entry] = (
                        configspec.comments.get(entry, []))
                    section.inline_comments[entry] = (
                        configspec.inline_comments.get(entry, ''))
                #
            else:
                missing = False
                val = section[entry]

            ret_true, ret_false = validate_entry(entry, configspec[entry], val,
                                                 missing, ret_true, ret_false)

        many = None
        if '__many__' in configspec.scalars:
            many = configspec['__many__']
        elif '___many___' in configspec.scalars:
            many = configspec['___many___']

        if many is not None:
            # a catch-all spec: validate the otherwise-unvalidated scalars
            for entry in unvalidated:
                val = section[entry]
                ret_true, ret_false = validate_entry(entry, many, val, False,
                                                     ret_true, ret_false)
            unvalidated = []

        for entry in incorrect_scalars:
            ret_true = False
            if not preserve_errors:
                out[entry] = False
            else:
                ret_false = False
                msg = 'Value %r was provided as a section' % entry
                out[entry] = validator.baseErrorClass(msg)
        for entry in incorrect_sections:
            ret_true = False
            if not preserve_errors:
                out[entry] = False
            else:
                ret_false = False
                msg = 'Section %r was provided as a single value' % entry
                out[entry] = validator.baseErrorClass(msg)

        # Missing sections will have been created as empty ones when the
        # configspec was read.
        for entry in section.sections:
            # FIXME: this means DEFAULT is not copied in copy mode
            if section is self and entry == 'DEFAULT':
                continue
            if section[entry].configspec is None:
                unvalidated.append(entry)
                continue
            if copy:
                section.comments[entry] = configspec.comments.get(entry, [])
                section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
            # recurse into the subsection
            check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry])
            out[entry] = check
            if check == False:
                ret_true = False
            elif check == True:
                ret_false = False
            else:
                ret_true = False

        section.extra_values = unvalidated
        if preserve_errors and not section._created:
            # If the section wasn't created (i.e. it wasn't missing)
            # then we can't return False, we need to preserve errors
            ret_false = False
        #
        if ret_false and preserve_errors and out:
            # If we are preserving errors, but all
            # the failures are from missing sections / values
            # then we can return False. Otherwise there is a
            # real failure that we need to preserve.
            ret_false = not any(out.values())
        if ret_true:
            return True
        elif ret_false:
            return False
        return out
|
|
2339
|
+ def reset(self):
|
|
2340
|
+ """Clear ConfigObj instance and restore to 'freshly created' state."""
|
|
2341
|
+ self.clear()
|
|
2342
|
+ self._initialise()
|
|
2343
|
+ # FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload)
|
|
2344
|
+ # requires an empty dictionary
|
|
2345
|
+ self.configspec = None
|
|
2346
|
+ # Just to be sure ;-)
|
|
2347
|
+ self._original_configspec = None
|
|
2348
|
+
|
|
2349
|
+
|
|
2350
|
    def reload(self):
        """
        Reload a ConfigObj from file.

        This method raises a ``ReloadError`` if the ConfigObj doesn't have
        a filename attribute pointing to a file.
        """
        if not isinstance(self.filename, six.string_types):
            raise ReloadError()

        filename = self.filename
        # snapshot the current option values so the reloaded instance
        # behaves identically to this one
        current_options = {}
        for entry in OPTION_DEFAULTS:
            if entry == 'configspec':
                continue
            current_options[entry] = getattr(self, entry)

        # use the raw (original) configspec, not the parsed one
        configspec = self._original_configspec
        current_options['configspec'] = configspec

        self.clear()
        self._initialise(current_options)
        self._load(filename, configspec)
|
|
2375
|
+
|
|
2376
|
class SimpleVal(object):
    """
    A simple validator.
    Can be used to check that all members expected are present.

    To use it, provide a configspec with all your members in (the value given
    will be ignored). Pass an instance of ``SimpleVal`` to the ``validate``
    method of your ``ConfigObj``. ``validate`` will return ``True`` if all
    members are present, or a dictionary with True/False meaning
    present/missing. (Whole missing sections will be replaced with ``False``)
    """

    def __init__(self):
        # validate() expects validators to expose their error base class
        self.baseErrorClass = ConfigObjError

    def check(self, check, member, missing=False):
        """A dummy check method, always returns the value unchanged."""
        if not missing:
            return member
        raise self.baseErrorClass()
|
|
2398
|
def flatten_errors(cfg, res, levels=None, results=None):
    """
    Turn a nested results dictionary (as returned by ``ConfigObj.validate``)
    into a flat, sorted list of failures.

    ``cfg`` is the ConfigObj instance being checked, ``res`` is the results
    dictionary returned by ``validate``.

    (This is a recursive function, so you shouldn't use the ``levels`` or
    ``results`` arguments - they are used by the function.)

    Each member of the returned list is a tuple::

        ([list of sections...], key, result)

    *list of sections* is the flattened path of sections the key was found
    in.  ``key`` is ``None`` when a whole section was missing (or the wrong
    kind of entry was supplied).  ``result`` is ``False`` for a missing
    value or section; with ``preserve_errors=True`` it is the exception
    object describing the failure, usable as an error message - for
    example *The value "3" is of the wrong type*.
    """
    if levels is None:
        # first (non-recursive) call: start with fresh accumulators
        levels = []
        results = []
    if res == True:
        # everything below this point passed
        return sorted(results)
    if res == False or isinstance(res, Exception):
        # the whole section failed (missing, or wrong kind of entry)
        results.append((levels[:], None, res))
        if levels:
            levels.pop()
        return sorted(results)
    for key, val in list(res.items()):
        if val == True:
            continue
        child = cfg.get(key)
        if isinstance(child, dict):
            # descend into the failing subsection
            levels.append(key)
            flatten_errors(cfg[key], val, levels, results)
        else:
            results.append((levels[:], key, val))
    # finished with this section - go back up one level
    if levels:
        levels.pop()
    return sorted(results)
|
|
2458
|
def get_extra_values(conf, _prepend=()):
    """
    Find all the values and sections not in the configspec from a validated
    ConfigObj.

    Returns a list of ``(section_path, name)`` tuples, where ``section_path``
    is a tuple of the section names leading to the extra item: ``()`` for the
    top level, ``('foo',)`` for the 'foo' section, ``('foo', 'bar')`` for its
    'bar' subsection, and so on.

    NOTE: calling this on a ConfigObj instance that hasn't been validated
    returns an empty list.
    """
    extras = [(_prepend, name) for name in conf.extra_values]
    # recurse into validated subsections only; extra sections are already
    # reported above and their contents are not walked
    for section_name in conf.sections:
        if section_name in conf.extra_values:
            continue
        extras.extend(
            get_extra_values(conf[section_name], _prepend + (section_name,)))
    return extras
|
|
2484
|
+"""*A programming language is a medium of expression.* - Paul Graham"""
|